diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 61f5e2b513..de1944f325 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 64.5.14
+current_version = 67.0.3
commit = True
tag = True
tag_name = v{new_version}
diff --git a/.github/workflows/tests_and_coverage.yml b/.github/workflows/tests_and_coverage.yml
index 46d2f28506..a601201570 100644
--- a/.github/workflows/tests_and_coverage.yml
+++ b/.github/workflows/tests_and_coverage.yml
@@ -24,16 +24,8 @@ jobs:
with:
virtualenvs-create: false
- - name: Cache dependencies
- uses: actions/cache@v4
- id: cache
- with:
- path: ${{ env.pythonLocation }}
- key: ${{ env.pythonLocation }}-${{ hashFiles('**/poetry.lock') }}
-
- name: Install Dependencies
- if: steps.cache.outputs.cache-hit != 'true'
- run: poetry install --no-interaction
+ run: poetry install --no-interaction --all-extras
- name: Test with Pytest & Coveralls
run: |
diff --git a/Dockerfile b/Dockerfile
index 77d8acca1c..a146affc96 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -23,14 +23,16 @@ ENV TRAILBLAZER_SERVICE_ACCOUNT_AUTH_FILE="auth_file"
WORKDIR /home/src/app
-COPY pyproject.toml poetry.lock gunicorn.conf.py ./
+COPY pyproject.toml poetry.lock gunicorn.conf.py README.md ./
RUN pip install --no-cache-dir poetry \
&& poetry config virtualenvs.create false \
- && poetry install --no-interaction --no-ansi
+ && poetry install --no-interaction --no-ansi --no-root
COPY cg ./cg
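+# Install third-party dependencies first (--no-root above) so the dependency
+# layer is cached across source changes, then install the cg package itself
+# once its source has been copied in.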
+RUN poetry install --no-interaction --no-ansi
+
CMD gunicorn \
--config gunicorn.conf.py \
cg.server.auto:app
\ No newline at end of file
diff --git a/alembic/versions/2024_12_02_5552c02a4966_add_nallo_to_analysis_options.py b/alembic/versions/2024_12_02_5552c02a4966_add_nallo_to_analysis_options.py
new file mode 100644
index 0000000000..edb6c11135
--- /dev/null
+++ b/alembic/versions/2024_12_02_5552c02a4966_add_nallo_to_analysis_options.py
@@ -0,0 +1,105 @@
+"""add-nallo-to-analysis-options
+
+Revision ID: 5552c02a4966
+Revises: 05ffb5e13d7b
+Create Date: 2024-12-02 11:35:31.725343
+
+"""
+
+from enum import StrEnum
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+
+# revision identifiers, used by Alembic.
+revision = "5552c02a4966"
+down_revision = "05ffb5e13d7b"
+branch_labels = None
+depends_on = None
+
+base_options = (
+ "balsamic",
+ "balsamic-pon",
+ "balsamic-qc",
+ "balsamic-umi",
+ "demultiplex",
+ "raw-data",
+ "fluffy",
+ "microsalt",
+ "mip-dna",
+ "mip-rna",
+ "mutant",
+ "raredisease",
+ "rnafusion",
+ "rsync",
+ "spring",
+ "taxprofiler",
+ "tomte",
+ "jasen",
+)
+
+old_options = sorted(base_options)
+new_options = sorted(base_options + ("nallo",))
+
+old_analysis_enum = mysql.ENUM(*old_options)
+new_analysis_enum = mysql.ENUM(*new_options)
+
+
+class Pipeline(StrEnum):
+ BALSAMIC: str = "balsamic"
+ BALSAMIC_PON: str = "balsamic-pon"
+ BALSAMIC_QC: str = "balsamic-qc"
+ BALSAMIC_UMI: str = "balsamic-umi"
+ DEMULTIPLEX: str = "demultiplex"
+ FLUFFY: str = "fluffy"
+ JASEN: str = "jasen"
+ MICROSALT: str = "microsalt"
+ MIP_DNA: str = "mip-dna"
+ MIP_RNA: str = "mip-rna"
+ MUTANT: str = "mutant"
+ NALLO: str = "nallo"
+ RAREDISEASE: str = "raredisease"
+ RAW_DATA: str = "raw-data"
+ RNAFUSION: str = "rnafusion"
+ RSYNC: str = "rsync"
+ SPRING: str = "spring"
+ TAXPROFILER: str = "taxprofiler"
+ TOMTE: str = "tomte"
+
+
+class Base(DeclarativeBase):
+ pass
+
+
+class Analysis(Base):
+ __tablename__ = "analysis"
+ id = sa.Column(sa.types.Integer, primary_key=True)
+ workflow = sa.Column(sa.types.Enum(*list(Pipeline)))
+
+
+class Case(Base):
+ __tablename__ = "case"
+ id = sa.Column(sa.types.Integer, primary_key=True)
+ data_analysis = sa.Column(sa.types.Enum(*list(Pipeline)))
+ internal_id = sa.Column(sa.types.String)
+
+
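+# Extending a MySQL ENUM requires redefining the column with the full member
+# list; adding "nallo" this way preserves all existing row values.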
+def upgrade():
+ op.alter_column("case", "data_analysis", type_=new_analysis_enum)
+ op.alter_column("analysis", "workflow", type_=new_analysis_enum)
+
+
+def downgrade():
+ bind = op.get_bind()
+ session = sa.orm.Session(bind=bind)
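+ # Remap rows still set to "nallo" before narrowing the columns back to the
+ # old ENUM, since the ALTER cannot represent that value anymore.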
+ for analysis in session.query(Analysis).filter(Analysis.workflow == "nallo"):
+ print(f"Changing pipeline for Case {Case.internal_id} to raw-data")
+ analysis.workflow = "raw-data"
+ for case in session.query(Case).filter(Case.data_analysis == "nallo"):
+ print(f"Changing data_analysis for Case {case.internal_id} to raw-data")
+ case.data_analysis = "raw-data"
+ op.alter_column("case", "data_analysis", type_=old_analysis_enum)
+ op.alter_column("analysis", "workflow", type_=old_analysis_enum)
+ session.commit()
diff --git a/cg/__init__.py b/cg/__init__.py
index c7ef26cad4..91a7e716b8 100644
--- a/cg/__init__.py
+++ b/cg/__init__.py
@@ -1,2 +1,2 @@
__title__ = "cg"
-__version__ = "64.5.14"
+__version__ = "67.0.3"
diff --git a/cg/apps/demultiplex/demultiplex_api.py b/cg/apps/demultiplex/demultiplex_api.py
index e1eaadabf3..e4317c2f36 100644
--- a/cg/apps/demultiplex/demultiplex_api.py
+++ b/cg/apps/demultiplex/demultiplex_api.py
@@ -12,8 +12,8 @@
from cg.apps.tb import TrailblazerAPI
from cg.constants.constants import FileFormat, Workflow
from cg.constants.demultiplexing import DemultiplexingDirsAndFiles
-from cg.constants.priority import SlurmQos
-from cg.constants.tb import AnalysisTypes
+from cg.constants.priority import SlurmQos, TrailblazerPriority
+from cg.constants.tb import AnalysisType
from cg.exc import HousekeeperFileMissingError
from cg.io.controller import WriteFile
from cg.models.demultiplex.sbatch import SbatchCommand, SbatchError
@@ -49,6 +49,11 @@ def slurm_quality_of_service(self) -> Literal[SlurmQos.HIGH, SlurmQos.LOW]:
"""Return SLURM quality of service."""
return SlurmQos.LOW if self.environment == "stage" else SlurmQos.HIGH
+ @property
+ def trailblazer_priority(self) -> Literal[TrailblazerPriority.HIGH, TrailblazerPriority.LOW]:
+ """Return Trailblazer quality of service."""
+ return TrailblazerPriority.LOW if self.environment == "stage" else TrailblazerPriority.HIGH
+
def set_dry_run(self, dry_run: bool) -> None:
"""Set dry run."""
LOG.debug(f"DemultiplexingAPI: Set dry run to {dry_run}")
@@ -210,10 +215,10 @@ def add_to_trailblazer(
)
tb_api.add_pending_analysis(
case_id=sequencing_run.id,
- analysis_type=AnalysisTypes.OTHER,
+ analysis_type=AnalysisType.OTHER,
config_path=sequencing_run.trailblazer_config_path.as_posix(),
out_dir=sequencing_run.trailblazer_config_path.parent.as_posix(),
- slurm_quality_of_service=self.slurm_quality_of_service,
+ priority=self.trailblazer_priority,
email=self.mail,
workflow=Workflow.DEMULTIPLEX,
)
diff --git a/cg/apps/demultiplex/sample_sheet/api.py b/cg/apps/demultiplex/sample_sheet/api.py
index 353587f450..6da44c22d1 100644
--- a/cg/apps/demultiplex/sample_sheet/api.py
+++ b/cg/apps/demultiplex/sample_sheet/api.py
@@ -1,15 +1,15 @@
import logging
from pathlib import Path
-import click
+import rich_click as click
from cg.apps.demultiplex.sample_sheet.read_sample_sheet import get_samples_from_content
from cg.apps.demultiplex.sample_sheet.sample_models import IlluminaSampleIndexSetting
from cg.apps.demultiplex.sample_sheet.sample_sheet_creator import SampleSheetCreator
from cg.apps.demultiplex.sample_sheet.sample_sheet_validator import SampleSheetValidator
from cg.apps.demultiplex.sample_sheet.utils import (
- delete_sample_sheet_from_housekeeper,
add_and_include_sample_sheet_path_to_housekeeper,
+ delete_sample_sheet_from_housekeeper,
)
from cg.apps.housekeeper.hk import HousekeeperAPI
from cg.apps.lims import LimsAPI
@@ -160,6 +160,20 @@ def _use_sample_sheet_from_housekeeper(
"would have copied it to sequencing run directory"
)
return
+
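+ # Nothing to copy if Housekeeper already points at the run-directory sample
+ # sheet; Path.samefile raises FileNotFoundError when either path is missing,
+ # in which case we fall through and link the file below.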
+ try:
+ if sample_sheet_path.samefile(run_directory_data.sample_sheet_path):
+ LOG.info(
+ "Sample sheet from Housekeeper is the same as the sequencing directory sample sheet"
+ )
+ return
+ except FileNotFoundError:
+ LOG.info(
+ f"Sample sheet or target path does not exist. "
+ f"Housekeeper sample sheet path: {sample_sheet_path}, "
+ f"Target sample sheet path: {run_directory_data.sample_sheet_path}"
+ )
+
LOG.info("Sample sheet from Housekeeper is valid. Copying it to sequencing run directory")
link_or_overwrite_file(src=sample_sheet_path, dst=run_directory_data.sample_sheet_path)
diff --git a/cg/apps/hermes/hermes_api.py b/cg/apps/hermes/hermes_api.py
index 95f03be5d5..fec6968e15 100644
--- a/cg/apps/hermes/hermes_api.py
+++ b/cg/apps/hermes/hermes_api.py
@@ -15,7 +15,11 @@ class HermesApi:
"""Class to communicate with hermes"""
def __init__(self, config: dict):
- self.process = Process(binary=config["hermes"]["binary_path"])
+ self.process = Process(
+ binary=config["hermes"]["binary_path"],
+ )
+ self.container_path: str = config["hermes"]["container_path"]
+ self.container_mount_volume = config["hermes"]["container_mount_volume"]
def convert_deliverables(
self,
@@ -27,6 +31,10 @@ def convert_deliverables(
"""Convert deliverables file in raw workflow format to CG format with Hermes."""
LOG.info("Converting workflow deliverables to CG deliverables")
convert_command = [
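+ # Run hermes through its container runtime: "--bind" mounts the configured
+ # host volume (singularity/apptainer-style flags) ahead of the usual
+ # hermes CLI arguments.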
+ "run",
+ "--bind",
+ self.container_mount_volume,
+ self.container_path,
"convert",
"deliverables",
"--workflow",
diff --git a/cg/apps/housekeeper/hk.py b/cg/apps/housekeeper/hk.py
index 2a2dd179aa..9de21d1963 100644
--- a/cg/apps/housekeeper/hk.py
+++ b/cg/apps/housekeeper/hk.py
@@ -7,11 +7,7 @@
from housekeeper.include import checksum as hk_checksum
from housekeeper.include import include_version
-from housekeeper.store.database import (
- create_all_tables,
- drop_all_tables,
- initialize_database,
-)
+from housekeeper.store.database import create_all_tables, drop_all_tables, initialize_database
from housekeeper.store.models import Archive, Bundle, File, Tag, Version
from housekeeper.store.store import Store
from sqlalchemy.orm import Query
@@ -27,17 +23,13 @@
class HousekeeperAPI:
- """API to decouple cg code from Housekeeper"""
+ """API to decouple cg code from Housekeeper."""
def __init__(self, config: dict) -> None:
initialize_database(config["housekeeper"]["database"])
self._store = Store(config["housekeeper"]["root"])
self.root_dir: str = config["housekeeper"]["root"]
- def __getattr__(self, name):
- LOG.warning(f"Called undefined {name} on {self.__class__.__name__}, please wrap")
- return getattr(self._store, name)
-
def new_bundle(self, name: str, created_at: datetime = None) -> Bundle:
"""Create a new file bundle."""
return self._store.new_bundle(name, created_at)
diff --git a/cg/apps/lims/api.py b/cg/apps/lims/api.py
index 71a66b799e..33b6f3130d 100644
--- a/cg/apps/lims/api.py
+++ b/cg/apps/lims/api.py
@@ -555,3 +555,11 @@ def _get_negative_controls_from_list(samples: list[Sample]) -> list[Sample]:
):
negative_controls.append(sample)
return negative_controls
+
+ def get_sample_region_and_lab_code(self, sample_id: str) -> str:
+ """Return the region code and lab code for a sample formatted as a prefix string."""
+ region_code: str = self.get_sample_attribute(lims_id=sample_id, key="region_code").split(
+ " "
+ )[0]
+ lab_code: str = self.get_sample_attribute(lims_id=sample_id, key="lab_code").split(" ")[0]
+ return f"{region_code}_{lab_code}_"
diff --git a/cg/apps/orderform/excel_orderform_parser.py b/cg/apps/orderform/excel_orderform_parser.py
index 796150d6c4..835701ac02 100644
--- a/cg/apps/orderform/excel_orderform_parser.py
+++ b/cg/apps/orderform/excel_orderform_parser.py
@@ -12,8 +12,8 @@
from cg.constants import DataDelivery
from cg.constants.orderforms import Orderform
from cg.exc import OrderFormError
+from cg.models.orders.constants import OrderType
from cg.models.orders.excel_sample import ExcelSample
-from cg.models.orders.order import OrderType
LOG = logging.getLogger(__name__)
diff --git a/cg/apps/orderform/json_orderform_parser.py b/cg/apps/orderform/json_orderform_parser.py
index 27c316fbc4..aadbb1ecb4 100644
--- a/cg/apps/orderform/json_orderform_parser.py
+++ b/cg/apps/orderform/json_orderform_parser.py
@@ -1,8 +1,8 @@
from cg.apps.orderform.orderform_parser import OrderformParser
from cg.constants import DataDelivery, Workflow
from cg.exc import OrderFormError
+from cg.models.orders.constants import OrderType
from cg.models.orders.json_sample import JsonSample
-from cg.models.orders.order import OrderType
class JsonOrderformParser(OrderformParser):
diff --git a/cg/apps/orderform/orderform_parser.py b/cg/apps/orderform/orderform_parser.py
index 9e84f72fe4..fa0a189655 100644
--- a/cg/apps/orderform/orderform_parser.py
+++ b/cg/apps/orderform/orderform_parser.py
@@ -4,9 +4,10 @@
from pydantic import BaseModel, ConfigDict, constr
+from cg.apps.orderform.utils import ORDER_TYPES_WITH_CASES
from cg.constants import DataDelivery
from cg.exc import OrderFormError
-from cg.models.orders.order import OrderType
+from cg.models.orders.constants import OrderType
from cg.models.orders.orderform_schema import OrderCase, Orderform, OrderPool
from cg.models.orders.sample_base import OrderSample
from cg.store.models import Customer
@@ -142,10 +143,11 @@ def expand_case(case_id: str, case_samples: list[OrderSample]) -> OrderCase:
def generate_orderform(self) -> Orderform:
"""Generate an orderform"""
- cases_map: dict[str, list[OrderSample]] = self.group_cases()
case_objs: list[OrderCase] = []
- for case_id in cases_map:
- case_objs.append(self.expand_case(case_id=case_id, case_samples=cases_map[case_id]))
+ if self.project_type in ORDER_TYPES_WITH_CASES:
+ cases_map: dict[str, list[OrderSample]] = self.group_cases()
+ for case_id in cases_map:
+ case_objs.append(self.expand_case(case_id=case_id, case_samples=cases_map[case_id]))
return Orderform(
comment=self.order_comment,
samples=self.samples,
diff --git a/cg/apps/orderform/utils.py b/cg/apps/orderform/utils.py
index 47fc6bccda..e1a93c587e 100644
--- a/cg/apps/orderform/utils.py
+++ b/cg/apps/orderform/utils.py
@@ -1,5 +1,16 @@
+from cg.models.orders.constants import OrderType
from cg.models.orders.excel_sample import ExcelSample
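+# Order types whose samples are grouped into cases during orderform parsing;
+# all other order types yield case-less orders (see generate_orderform).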
+ORDER_TYPES_WITH_CASES = [
+ OrderType.BALSAMIC,
+ OrderType.BALSAMIC_QC,
+ OrderType.BALSAMIC_UMI,
+ OrderType.MIP_DNA,
+ OrderType.MIP_RNA,
+ OrderType.RNAFUSION,
+ OrderType.TOMTE,
+]
+
def are_all_samples_metagenome(samples: list[ExcelSample]) -> bool:
"""Check if all samples are metagenome samples"""
diff --git a/cg/apps/tb/api.py b/cg/apps/tb/api.py
index bb1b64b42e..304b10ba33 100644
--- a/cg/apps/tb/api.py
+++ b/cg/apps/tb/api.py
@@ -4,15 +4,15 @@
import logging
from typing import Any
-from google.auth import jwt
-from google.auth.crypt import RSASigner
+from google.auth.transport.requests import Request
+from google.oauth2 import service_account
from cg.apps.tb.dto.create_job_request import CreateJobRequest
from cg.apps.tb.dto.summary_response import AnalysisSummary, SummariesResponse
from cg.apps.tb.models import AnalysesResponse, TrailblazerAnalysis
from cg.constants import Workflow
from cg.constants.constants import APIMethods, FileFormat, JobType, WorkflowManager
-from cg.constants.priority import SlurmQos
+from cg.constants.priority import TrailblazerPriority
from cg.constants.tb import AnalysisStatus
from cg.exc import (
AnalysisNotCompletedError,
@@ -21,6 +21,7 @@
)
from cg.io.controller import APIRequest, ReadStream
+
LOG = logging.getLogger(__name__)
@@ -49,10 +50,12 @@ def __init__(self, config: dict):
@property
def auth_header(self) -> dict:
- signer = RSASigner.from_service_account_file(self.service_account_auth_file)
- payload = {"email": self.service_account}
- jwt_token = jwt.encode(signer=signer, payload=payload).decode("ascii")
- return {"Authorization": f"Bearer {jwt_token}"}
+ credentials = service_account.IDTokenCredentials.from_service_account_file(
+ filename=self.service_account_auth_file,
+ target_audience="trailblazer",
+ )
+ credentials.refresh(Request())
+ return {"Authorization": f"Bearer {credentials.token}"}
def query_trailblazer(
self, command: str, request_body: dict, method: str = APIMethods.POST
@@ -112,7 +115,7 @@ def add_pending_analysis(
analysis_type: str,
config_path: str,
out_dir: str,
- slurm_quality_of_service: SlurmQos,
+ priority: TrailblazerPriority,
email: str = None,
order_id: int | None = None,
workflow: Workflow = None,
@@ -128,7 +131,7 @@ def add_pending_analysis(
"config_path": config_path,
"order_id": order_id,
"out_dir": out_dir,
- "priority": slurm_quality_of_service,
+ "priority": priority,
"workflow": workflow.upper(),
"ticket": ticket,
"workflow_manager": workflow_manager,
diff --git a/cg/cli/add.py b/cg/cli/add.py
index 3aae8edbac..be9bb5b961 100644
--- a/cg/cli/add.py
+++ b/cg/cli/add.py
@@ -1,6 +1,6 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS, is_case_name_allowed
from cg.constants import DataDelivery, Priority, Workflow
diff --git a/cg/cli/archive.py b/cg/cli/archive.py
index 35a76a3e9b..08ca079a78 100644
--- a/cg/cli/archive.py
+++ b/cg/cli/archive.py
@@ -1,4 +1,4 @@
-import click
+import rich_click as click
from click.core import ParameterSource
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
diff --git a/cg/cli/backup.py b/cg/cli/backup.py
index 7b18cdb127..b59b618acd 100644
--- a/cg/cli/backup.py
+++ b/cg/cli/backup.py
@@ -4,7 +4,7 @@
from pathlib import Path
from typing import Iterable
-import click
+import rich_click as click
import housekeeper.store.models as hk_models
from cg.apps.housekeeper.hk import HousekeeperAPI
diff --git a/cg/cli/base.py b/cg/cli/base.py
index 761644d3bb..9ba7925f52 100644
--- a/cg/cli/base.py
+++ b/cg/cli/base.py
@@ -4,7 +4,7 @@
import sys
from pathlib import Path
-import click
+import rich_click as click
import coloredlogs
from sqlalchemy.orm import scoped_session
diff --git a/cg/cli/clean.py b/cg/cli/clean.py
index 384c6f5fed..5dff4f00d2 100644
--- a/cg/cli/clean.py
+++ b/cg/cli/clean.py
@@ -4,7 +4,7 @@
from datetime import datetime, timedelta
from pathlib import Path
-import click
+import rich_click as click
from housekeeper.store.models import File, Version
from cg.apps.housekeeper.hk import HousekeeperAPI
diff --git a/cg/cli/compress/base.py b/cg/cli/compress/base.py
index 07094e925d..c64ee62d7c 100644
--- a/cg/cli/compress/base.py
+++ b/cg/cli/compress/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.compress.fastq import (
clean_fastq,
diff --git a/cg/cli/compress/fastq.py b/cg/cli/compress/fastq.py
index 7ce779c8e8..156cb3ae77 100644
--- a/cg/cli/compress/fastq.py
+++ b/cg/cli/compress/fastq.py
@@ -3,7 +3,7 @@
import logging
from typing import Iterable
-import click
+import rich_click as click
from cg.apps.housekeeper.hk import HousekeeperAPI
from cg.cli.compress.helpers import (
diff --git a/cg/cli/delete/base.py b/cg/cli/delete/base.py
index 456e51d8b8..35d9b797da 100644
--- a/cg/cli/delete/base.py
+++ b/cg/cli/delete/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.delete.case import delete_case
from cg.cli.delete.cases import delete_cases
diff --git a/cg/cli/delete/case.py b/cg/cli/delete/case.py
index 54e6c88940..775c70aaf8 100644
--- a/cg/cli/delete/case.py
+++ b/cg/cli/delete/case.py
@@ -3,7 +3,7 @@
import datetime
import logging
-import click
+import rich_click as click
from cg.cli.get import get_case as print_case
from cg.constants.cli_options import DRY_RUN, SKIP_CONFIRMATION
diff --git a/cg/cli/delete/cases.py b/cg/cli/delete/cases.py
index bcc9f2654b..e39373fe42 100644
--- a/cg/cli/delete/cases.py
+++ b/cg/cli/delete/cases.py
@@ -1,6 +1,6 @@
import logging
-import click
+import rich_click as click
from cg.cli.delete.case import delete_case
from cg.constants.cli_options import DRY_RUN
diff --git a/cg/cli/delete/illumina_sequencing_run.py b/cg/cli/delete/illumina_sequencing_run.py
index 8043582a35..0a0c70801e 100644
--- a/cg/cli/delete/illumina_sequencing_run.py
+++ b/cg/cli/delete/illumina_sequencing_run.py
@@ -1,4 +1,4 @@
-import click
+import rich_click as click
from cg.constants.cli_options import DRY_RUN
from cg.services.illumina.post_processing.housekeeper_storage import (
diff --git a/cg/cli/delete/observations.py b/cg/cli/delete/observations.py
index 32db5fa4cf..1ce8566314 100644
--- a/cg/cli/delete/observations.py
+++ b/cg/cli/delete/observations.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from sqlalchemy.orm import Query
from cg.cli.upload.observations.utils import get_observations_api
diff --git a/cg/cli/deliver/base.py b/cg/cli/deliver/base.py
index 39ba54e19e..8762c8c555 100644
--- a/cg/cli/deliver/base.py
+++ b/cg/cli/deliver/base.py
@@ -3,7 +3,7 @@
import logging
from pathlib import Path
-import click
+import rich_click as click
from cg.apps.tb import TrailblazerAPI
from cg.cli.deliver.utils import deliver_raw_data_for_analyses
@@ -15,7 +15,7 @@
from cg.services.deliver_files.deliver_files_service.deliver_files_service import (
DeliverFilesService,
)
-from cg.services.deliver_files.deliver_files_service.deliver_files_service_factory import (
+from cg.services.deliver_files.factory import (
DeliveryServiceFactory,
)
from cg.services.deliver_files.rsync.service import DeliveryRsyncService
@@ -88,8 +88,7 @@ def deliver_case(
LOG.error(f"Could not find case with id {case_id}")
return
delivery_service: DeliverFilesService = service_builder.build_delivery_service(
- case=case,
- delivery_type=delivery_type,
+ case=case, delivery_type=delivery_type
)
delivery_service.deliver_files_for_case(
case=case, delivery_base_path=Path(inbox), dry_run=dry_run
@@ -124,8 +123,7 @@ def deliver_ticket(
LOG.error(f"Could not find case connected to ticket {ticket}")
return
delivery_service: DeliverFilesService = service_builder.build_delivery_service(
- case=cases[0],
- delivery_type=delivery_type,
+ case=cases[0], delivery_type=delivery_type
)
delivery_service.deliver_files_for_ticket(
ticket_id=ticket, delivery_base_path=Path(inbox), dry_run=dry_run
@@ -172,8 +170,7 @@ def deliver_sample_raw_data(
LOG.error(f"Could not find case with id {case_id}")
return
delivery_service: DeliverFilesService = service_builder.build_delivery_service(
- case=case,
- delivery_type=delivery_type,
+ case=case, delivery_type=delivery_type
)
delivery_service.deliver_files_for_sample(
case=case, sample_id=sample_id, delivery_base_path=Path(inbox), dry_run=dry_run
diff --git a/cg/cli/deliver/utils.py b/cg/cli/deliver/utils.py
index 14e8255c51..f4b0040112 100644
--- a/cg/cli/deliver/utils.py
+++ b/cg/cli/deliver/utils.py
@@ -5,7 +5,7 @@
from cg.services.deliver_files.deliver_files_service.deliver_files_service import (
DeliverFilesService,
)
-from cg.services.deliver_files.deliver_files_service.deliver_files_service_factory import (
+from cg.services.deliver_files.factory import (
DeliveryServiceFactory,
)
from cg.store.models import Analysis, Case
@@ -26,8 +26,7 @@ def deliver_raw_data_for_analyses(
try:
case: Case = analysis.case
delivery_service: DeliverFilesService = service_builder.build_delivery_service(
- case=case,
- delivery_type=case.data_delivery,
+ case=case, delivery_type=case.data_delivery
)
delivery_service.deliver_files_for_case(
diff --git a/cg/cli/demultiplex/base.py b/cg/cli/demultiplex/base.py
index 4954b8b27b..a6baa60669 100644
--- a/cg/cli/demultiplex/base.py
+++ b/cg/cli/demultiplex/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.demultiplex.demux import (
diff --git a/cg/cli/demultiplex/demux.py b/cg/cli/demultiplex/demux.py
index 4bc41f1103..187ead1222 100644
--- a/cg/cli/demultiplex/demux.py
+++ b/cg/cli/demultiplex/demux.py
@@ -2,7 +2,7 @@
from glob import glob
from pathlib import Path
-import click
+import rich_click as click
from pydantic import ValidationError
from cg.apps.demultiplex.demultiplex_api import DemultiplexingAPI
diff --git a/cg/cli/demultiplex/finish.py b/cg/cli/demultiplex/finish.py
index db710ce96d..0328f24c25 100644
--- a/cg/cli/demultiplex/finish.py
+++ b/cg/cli/demultiplex/finish.py
@@ -3,7 +3,7 @@
import logging
from pathlib import Path
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.constants.cli_options import DRY_RUN, FORCE
diff --git a/cg/cli/demultiplex/sample_sheet.py b/cg/cli/demultiplex/sample_sheet.py
index 3a860601df..42be67952c 100644
--- a/cg/cli/demultiplex/sample_sheet.py
+++ b/cg/cli/demultiplex/sample_sheet.py
@@ -1,7 +1,7 @@
import logging
from pathlib import Path
-import click
+import rich_click as click
from pydantic import ValidationError
from cg.apps.demultiplex.sample_sheet.api import IlluminaSampleSheetService
diff --git a/cg/cli/downsample.py b/cg/cli/downsample.py
index bd24e94a33..2b1695ee45 100644
--- a/cg/cli/downsample.py
+++ b/cg/cli/downsample.py
@@ -4,7 +4,7 @@
from pathlib import Path
from typing import Tuple
-import click
+import rich_click as click
from cg.apps.downsample.downsample import DownsampleAPI
from cg.apps.downsample.utils import store_downsampled_sample_bundle
diff --git a/cg/cli/generate/base.py b/cg/cli/generate/base.py
index c1f32e0efd..67de232244 100644
--- a/cg/cli/generate/base.py
+++ b/cg/cli/generate/base.py
@@ -1,6 +1,6 @@
"""Common CLI file generation functions"""
-import click
+import rich_click as click
from cg.cli.generate.delivery_report.base import (
generate_available_delivery_reports,
diff --git a/cg/cli/generate/delivery_report/base.py b/cg/cli/generate/delivery_report/base.py
index 834c4900b8..ab1676f985 100644
--- a/cg/cli/generate/delivery_report/base.py
+++ b/cg/cli/generate/delivery_report/base.py
@@ -5,7 +5,7 @@
from datetime import datetime
from pathlib import Path
-import click
+import rich_click as click
from housekeeper.store.models import Version
from cg.cli.generate.delivery_report.options import (
diff --git a/cg/cli/generate/delivery_report/options.py b/cg/cli/generate/delivery_report/options.py
index f41af3a4eb..54ac10aac7 100644
--- a/cg/cli/generate/delivery_report/options.py
+++ b/cg/cli/generate/delivery_report/options.py
@@ -1,6 +1,6 @@
"""Delivery report specific command options."""
-import click
+import rich_click as click
from cg.constants import REPORT_SUPPORTED_WORKFLOW
diff --git a/cg/cli/generate/delivery_report/utils.py b/cg/cli/generate/delivery_report/utils.py
index 66ab9ad6c7..fc11f4e321 100644
--- a/cg/cli/generate/delivery_report/utils.py
+++ b/cg/cli/generate/delivery_report/utils.py
@@ -3,7 +3,7 @@
import logging
from datetime import datetime
-import click
+import rich_click as click
from cg.constants import REPORT_SUPPORTED_DATA_DELIVERY, REPORT_SUPPORTED_WORKFLOW, Workflow
from cg.meta.delivery_report.balsamic import BalsamicDeliveryReportAPI
diff --git a/cg/cli/get.py b/cg/cli/get.py
index bf73a29121..ea49e4d5fd 100644
--- a/cg/cli/get.py
+++ b/cg/cli/get.py
@@ -2,7 +2,7 @@
import re
from typing import Iterable
-import click
+import rich_click as click
from tabulate import tabulate
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
diff --git a/cg/cli/post_process/post_process.py b/cg/cli/post_process/post_process.py
index f0fc1ccd8f..861d42c853 100644
--- a/cg/cli/post_process/post_process.py
+++ b/cg/cli/post_process/post_process.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.post_process.utils import (
UnprocessedRunInfo,
diff --git a/cg/cli/sequencing_qc/sequencing_qc.py b/cg/cli/sequencing_qc/sequencing_qc.py
index 031c25fa2e..c1b20e6681 100644
--- a/cg/cli/sequencing_qc/sequencing_qc.py
+++ b/cg/cli/sequencing_qc/sequencing_qc.py
@@ -1,5 +1,5 @@
import logging
-import click
+import rich_click as click
from cg.models.cg_config import CGConfig
from cg.services.sequencing_qc_service.sequencing_qc_service import SequencingQCService
diff --git a/cg/cli/set/base.py b/cg/cli/set/base.py
index 08a9fc9034..d06aad0522 100644
--- a/cg/cli/set/base.py
+++ b/cg/cli/set/base.py
@@ -5,7 +5,7 @@
import logging
from typing import Iterable
-import click
+import rich_click as click
from cg.cli.set.case import set_case
from cg.cli.set.cases import set_cases
diff --git a/cg/cli/set/case.py b/cg/cli/set/case.py
index e84c8af929..40c1df7c18 100644
--- a/cg/cli/set/case.py
+++ b/cg/cli/set/case.py
@@ -3,7 +3,7 @@
import logging
from typing import Callable
-import click
+import rich_click as click
from cg.constants import DataDelivery, Priority, Workflow
from cg.constants.constants import CaseActions
diff --git a/cg/cli/set/cases.py b/cg/cli/set/cases.py
index 1c4b81433a..243d5f5020 100644
--- a/cg/cli/set/cases.py
+++ b/cg/cli/set/cases.py
@@ -1,6 +1,6 @@
import logging
-import click
+import rich_click as click
from cg.cli.set.case import set_case
from cg.constants import Priority
diff --git a/cg/cli/store/base.py b/cg/cli/store/base.py
index a9319605b2..8ef8520f41 100644
--- a/cg/cli/store/base.py
+++ b/cg/cli/store/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.apps.crunchy.crunchy import CrunchyAPI
from cg.apps.housekeeper.hk import HousekeeperAPI
diff --git a/cg/cli/store/store.py b/cg/cli/store/store.py
index 95513d8267..0f54f18045 100644
--- a/cg/cli/store/store.py
+++ b/cg/cli/store/store.py
@@ -2,7 +2,7 @@
from pathlib import Path
from typing import Iterable
-import click
+import rich_click as click
from housekeeper.store.models import File
from cg.apps.crunchy.files import update_metadata_paths
diff --git a/cg/cli/transfer.py b/cg/cli/transfer.py
index 720eca446b..13cd3318c5 100644
--- a/cg/cli/transfer.py
+++ b/cg/cli/transfer.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.apps.lims import LimsAPI
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
diff --git a/cg/cli/upload/base.py b/cg/cli/upload/base.py
index 4840e1af7d..46ffa1ac0d 100644
--- a/cg/cli/upload/base.py
+++ b/cg/cli/upload/base.py
@@ -4,7 +4,7 @@
import sys
import traceback
-import click
+import rich_click as click
from cg.cli.upload.coverage import upload_coverage
from cg.cli.upload.delivery_report import upload_delivery_report_to_scout
@@ -39,6 +39,7 @@
from cg.meta.upload.microsalt.microsalt_upload_api import MicrosaltUploadAPI
from cg.meta.upload.mip.mip_dna import MipDNAUploadAPI
from cg.meta.upload.mip.mip_rna import MipRNAUploadAPI
+from cg.meta.upload.mutant.mutant import MutantUploadAPI
from cg.meta.upload.nf_analysis import NfAnalysisUploadAPI
from cg.meta.upload.tomte.tomte import TomteUploadAPI
from cg.meta.upload.raredisease.raredisease import RarediseaseUploadAPI
@@ -94,6 +95,8 @@ def upload(context: click.Context, case_id: str | None, restart: bool):
Workflow.TAXPROFILER,
}:
upload_api = NfAnalysisUploadAPI(config_object, case.data_analysis)
+ elif case.data_analysis == Workflow.MUTANT:
+ upload_api = MutantUploadAPI(config_object)
context.obj.meta_apis["upload_api"] = upload_api
upload_api.upload(ctx=context, case=case, restart=restart)
diff --git a/cg/cli/upload/coverage.py b/cg/cli/upload/coverage.py
index f37ed7b028..7e53e1581b 100644
--- a/cg/cli/upload/coverage.py
+++ b/cg/cli/upload/coverage.py
@@ -1,6 +1,6 @@
"""Code for uploading coverage reports via CLI"""
-import click
+import rich_click as click
from cg.meta.upload.coverage import UploadCoverageApi
from cg.models.cg_config import CGConfig
diff --git a/cg/cli/upload/delivery_report.py b/cg/cli/upload/delivery_report.py
index 25e8d15c5a..03cf990558 100644
--- a/cg/cli/upload/delivery_report.py
+++ b/cg/cli/upload/delivery_report.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from housekeeper.store.models import Version
from cg.cli.generate.delivery_report.options import ARGUMENT_CASE_ID
diff --git a/cg/cli/upload/fohm.py b/cg/cli/upload/fohm.py
index 194b610cdb..34caf6ba9f 100644
--- a/cg/cli/upload/fohm.py
+++ b/cg/cli/upload/fohm.py
@@ -1,6 +1,6 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.constants.cli_options import DRY_RUN
@@ -41,7 +41,11 @@ def aggregate_delivery(
context: CGConfig, cases: list, dry_run: bool = False, datestr: str | None = None
):
"""Re-aggregates delivery files for FOHM and saves them to default working directory."""
- fohm_api = FOHMUploadAPI(config=context, dry_run=dry_run, datestr=datestr)
+ fohm_api = FOHMUploadAPI(
+ config=context,
+ dry_run=dry_run,
+ datestr=datestr,
+ )
try:
fohm_api.aggregate_delivery(cases)
except (ValidationError, TypeError) as error:
@@ -57,7 +61,11 @@ def create_komplettering(
context: CGConfig, cases: list, dry_run: bool = False, datestr: str | None = None
):
"""Re-aggregates komplettering files for FOHM and saves them to default working directory."""
- fohm_api = FOHMUploadAPI(config=context, dry_run=dry_run, datestr=datestr)
+ fohm_api = FOHMUploadAPI(
+ config=context,
+ dry_run=dry_run,
+ datestr=datestr,
+ )
try:
fohm_api.create_and_write_complementary_report(cases)
except ValidationError as error:
@@ -73,7 +81,11 @@ def preprocess_all(
context: CGConfig, cases: list, dry_run: bool = False, datestr: str | None = None
):
"""Create all FOHM upload files, upload to GISAID, sync SFTP and mail reports for all provided cases."""
- fohm_api = FOHMUploadAPI(config=context, dry_run=dry_run, datestr=datestr)
+ fohm_api = FOHMUploadAPI(
+ config=context,
+ dry_run=dry_run,
+ datestr=datestr,
+ )
gisaid_api = GisaidAPI(config=context)
cases = list(cases)
upload_cases = []
@@ -105,7 +117,11 @@ def preprocess_all(
@click.pass_obj
def upload_rawdata(context: CGConfig, dry_run: bool = False, datestr: str | None = None):
"""Deliver files in daily upload directory via sftp."""
- fohm_api = FOHMUploadAPI(config=context, dry_run=dry_run, datestr=datestr)
+ fohm_api = FOHMUploadAPI(
+ config=context,
+ dry_run=dry_run,
+ datestr=datestr,
+ )
fohm_api.sync_files_sftp()
@@ -115,5 +131,9 @@ def upload_rawdata(context: CGConfig, dry_run: bool = False, datestr: str | None
@click.pass_obj
def send_reports(context: CGConfig, dry_run: bool = False, datestr: str | None = None):
"""Send all komplettering reports found in the current daily directory to target recipients."""
- fohm_api = FOHMUploadAPI(config=context, dry_run=dry_run, datestr=datestr)
+ fohm_api = FOHMUploadAPI(
+ config=context,
+ dry_run=dry_run,
+ datestr=datestr,
+ )
fohm_api.send_mail_reports()
diff --git a/cg/cli/upload/genotype.py b/cg/cli/upload/genotype.py
index ee79a25641..9371c51a6c 100644
--- a/cg/cli/upload/genotype.py
+++ b/cg/cli/upload/genotype.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.apps.gt import GenotypeAPI
from cg.apps.housekeeper.hk import HousekeeperAPI
diff --git a/cg/cli/upload/gens.py b/cg/cli/upload/gens.py
index a2ae7f55c0..ad317c4bae 100644
--- a/cg/cli/upload/gens.py
+++ b/cg/cli/upload/gens.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from housekeeper.store.models import File
from cg.apps.gens import GensAPI
diff --git a/cg/cli/upload/gisaid.py b/cg/cli/upload/gisaid.py
index 96b4e24d63..0a4461023d 100644
--- a/cg/cli/upload/gisaid.py
+++ b/cg/cli/upload/gisaid.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.meta.upload.gisaid import GisaidAPI
from cg.models.cg_config import CGConfig
diff --git a/cg/cli/upload/mutacc.py b/cg/cli/upload/mutacc.py
index c8f315fd52..a57aee91a6 100644
--- a/cg/cli/upload/mutacc.py
+++ b/cg/cli/upload/mutacc.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.apps.mutacc_auto import MutaccAutoAPI
from cg.apps.scout.scout_export import ScoutExportCase
diff --git a/cg/cli/upload/nipt/base.py b/cg/cli/upload/nipt/base.py
index 7c76e97158..72fd2ad482 100644
--- a/cg/cli/upload/nipt/base.py
+++ b/cg/cli/upload/nipt/base.py
@@ -3,7 +3,7 @@
import logging
import traceback
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.constants.cli_options import DRY_RUN, FORCE
diff --git a/cg/cli/upload/nipt/ftp.py b/cg/cli/upload/nipt/ftp.py
index bfd69c2f47..12c07ac7de 100644
--- a/cg/cli/upload/nipt/ftp.py
+++ b/cg/cli/upload/nipt/ftp.py
@@ -1,7 +1,7 @@
import logging
from pathlib import Path
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.constants.cli_options import DRY_RUN, FORCE
diff --git a/cg/cli/upload/nipt/statina.py b/cg/cli/upload/nipt/statina.py
index febe3f15c3..56f52be463 100644
--- a/cg/cli/upload/nipt/statina.py
+++ b/cg/cli/upload/nipt/statina.py
@@ -1,6 +1,6 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.constants.cli_options import DRY_RUN, FORCE
diff --git a/cg/cli/upload/observations/observations.py b/cg/cli/upload/observations/observations.py
index 1f51e40dda..abddad3c38 100644
--- a/cg/cli/upload/observations/observations.py
+++ b/cg/cli/upload/observations/observations.py
@@ -3,7 +3,7 @@
import logging
from datetime import datetime
-import click
+import rich_click as click
from sqlalchemy.orm import Query
from cg.cli.upload.observations.utils import get_observations_api
diff --git a/cg/cli/upload/scout.py b/cg/cli/upload/scout.py
index a8a4064609..88812528ca 100644
--- a/cg/cli/upload/scout.py
+++ b/cg/cli/upload/scout.py
@@ -3,7 +3,7 @@
import logging
from pathlib import Path
-import click
+import rich_click as click
from housekeeper.store.models import File, Version
from cg.apps.housekeeper.hk import HousekeeperAPI
diff --git a/cg/cli/upload/utils.py b/cg/cli/upload/utils.py
index 5276b47475..8ca6db9d28 100644
--- a/cg/cli/upload/utils.py
+++ b/cg/cli/upload/utils.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.constants import Workflow
from cg.constants.constants import MAX_ITEMS_TO_RETRIEVE
diff --git a/cg/cli/upload/validate.py b/cg/cli/upload/validate.py
index 220934153e..bd2bd09055 100644
--- a/cg/cli/upload/validate.py
+++ b/cg/cli/upload/validate.py
@@ -1,6 +1,6 @@
"""Code for validating an upload via CLI"""
-import click
+import rich_click as click
from cg.apps.coverage import ChanjoAPI
from cg.models.cg_config import CGConfig
diff --git a/cg/cli/utils.py b/cg/cli/utils.py
index 6e55703502..be13b5c52b 100644
--- a/cg/cli/utils.py
+++ b/cg/cli/utils.py
@@ -1,7 +1,7 @@
import re
import shutil
-import click
+import rich_click as click
from cg.constants import Workflow
from cg.meta.workflow.raredisease import RarediseaseAnalysisAPI
diff --git a/cg/cli/workflow/balsamic/base.py b/cg/cli/workflow/balsamic/base.py
index b3028c1c30..afb18bc185 100644
--- a/cg/cli/workflow/balsamic/base.py
+++ b/cg/cli/workflow/balsamic/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from pydantic.v1 import ValidationError
from cg.apps.housekeeper.hk import HousekeeperAPI
diff --git a/cg/cli/workflow/balsamic/options.py b/cg/cli/workflow/balsamic/options.py
index 744393ee38..374640ecfe 100644
--- a/cg/cli/workflow/balsamic/options.py
+++ b/cg/cli/workflow/balsamic/options.py
@@ -1,4 +1,4 @@
-import click
+import rich_click as click
from cg.constants.constants import GenomeVersion
from cg.constants.priority import SlurmQos
diff --git a/cg/cli/workflow/balsamic/pon.py b/cg/cli/workflow/balsamic/pon.py
index 6c07b35925..186a3c038b 100644
--- a/cg/cli/workflow/balsamic/pon.py
+++ b/cg/cli/workflow/balsamic/pon.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.balsamic.base import config_case, run, start
diff --git a/cg/cli/workflow/balsamic/qc.py b/cg/cli/workflow/balsamic/qc.py
index 2ea997e454..e2690ce334 100644
--- a/cg/cli/workflow/balsamic/qc.py
+++ b/cg/cli/workflow/balsamic/qc.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.balsamic.base import (
diff --git a/cg/cli/workflow/balsamic/umi.py b/cg/cli/workflow/balsamic/umi.py
index b5b4914fa2..34df4328b1 100644
--- a/cg/cli/workflow/balsamic/umi.py
+++ b/cg/cli/workflow/balsamic/umi.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.balsamic.base import (
diff --git a/cg/cli/workflow/base.py b/cg/cli/workflow/base.py
index 98b2451e8b..2d1cc1590c 100644
--- a/cg/cli/workflow/base.py
+++ b/cg/cli/workflow/base.py
@@ -1,6 +1,6 @@
"""Common CLI workflow functions"""
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.balsamic.base import balsamic
@@ -14,6 +14,7 @@
from cg.cli.workflow.mip_dna.base import mip_dna
from cg.cli.workflow.mip_rna.base import mip_rna
from cg.cli.workflow.mutant.base import mutant
+from cg.cli.workflow.nallo.base import nallo
from cg.cli.workflow.raredisease.base import raredisease
from cg.cli.workflow.rnafusion.base import rnafusion
from cg.cli.workflow.taxprofiler.base import taxprofiler
@@ -27,16 +28,17 @@ def workflow():
workflow.add_command(balsamic)
workflow.add_command(balsamic_qc)
-workflow.add_command(balsamic_umi)
workflow.add_command(balsamic_pon)
+workflow.add_command(balsamic_umi)
+workflow.add_command(fluffy)
+workflow.add_command(jasen)
workflow.add_command(microsalt)
workflow.add_command(mip_dna)
workflow.add_command(mip_rna)
-workflow.add_command(fluffy)
-workflow.add_command(jasen)
workflow.add_command(mutant)
+workflow.add_command(nallo)
workflow.add_command(raredisease)
+workflow.add_command(raw_data)
workflow.add_command(rnafusion)
workflow.add_command(taxprofiler)
workflow.add_command(tomte)
-workflow.add_command(raw_data)
diff --git a/cg/cli/workflow/commands.py b/cg/cli/workflow/commands.py
index 8172181585..3d30b7e221 100644
--- a/cg/cli/workflow/commands.py
+++ b/cg/cli/workflow/commands.py
@@ -3,7 +3,7 @@
import shutil
from pathlib import Path
-import click
+import rich_click as click
from dateutil.parser import parse as parse_date
from cg.apps.housekeeper.hk import HousekeeperAPI
diff --git a/cg/cli/workflow/fluffy/base.py b/cg/cli/workflow/fluffy/base.py
index 1302f711d2..6da80b5639 100644
--- a/cg/cli/workflow/fluffy/base.py
+++ b/cg/cli/workflow/fluffy/base.py
@@ -1,6 +1,6 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.commands import link, resolve_compression, store, store_available
diff --git a/cg/cli/workflow/jasen/base.py b/cg/cli/workflow/jasen/base.py
index c4aafee671..c8a9fb747c 100644
--- a/cg/cli/workflow/jasen/base.py
+++ b/cg/cli/workflow/jasen/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.constants.constants import MetaApis
diff --git a/cg/cli/workflow/microsalt/base.py b/cg/cli/workflow/microsalt/base.py
index a34e76fa8d..474feb8e57 100644
--- a/cg/cli/workflow/microsalt/base.py
+++ b/cg/cli/workflow/microsalt/base.py
@@ -4,7 +4,7 @@
from pathlib import Path
from typing import Any
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.commands import resolve_compression, store, store_available
diff --git a/cg/cli/workflow/mip/base.py b/cg/cli/workflow/mip/base.py
index 924310283d..ac6ac3e2c5 100644
--- a/cg/cli/workflow/mip/base.py
+++ b/cg/cli/workflow/mip/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.apps.environ import environ_email
from cg.cli.utils import echo_lines
diff --git a/cg/cli/workflow/mip/options.py b/cg/cli/workflow/mip/options.py
index 5a99124c28..7ad022fd81 100644
--- a/cg/cli/workflow/mip/options.py
+++ b/cg/cli/workflow/mip/options.py
@@ -1,4 +1,4 @@
-import click
+import rich_click as click
from cg.constants.priority import SlurmQos
diff --git a/cg/cli/workflow/mip_dna/base.py b/cg/cli/workflow/mip_dna/base.py
index de629558e6..eb503cc77e 100644
--- a/cg/cli/workflow/mip_dna/base.py
+++ b/cg/cli/workflow/mip_dna/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.commands import (
diff --git a/cg/cli/workflow/mip_rna/base.py b/cg/cli/workflow/mip_rna/base.py
index 62c1d9d97e..a345c83e7e 100644
--- a/cg/cli/workflow/mip_rna/base.py
+++ b/cg/cli/workflow/mip_rna/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.commands import link, resolve_compression, store, store_available
diff --git a/cg/cli/workflow/mutant/base.py b/cg/cli/workflow/mutant/base.py
index 740eb16e2e..e2f7f9310f 100644
--- a/cg/cli/workflow/mutant/base.py
+++ b/cg/cli/workflow/mutant/base.py
@@ -1,6 +1,6 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.commands import (
diff --git a/cg/cli/workflow/nallo/__init__.py b/cg/cli/workflow/nallo/__init__.py
new file mode 100644
index 0000000000..d3a1fde1a8
--- /dev/null
+++ b/cg/cli/workflow/nallo/__init__.py
@@ -0,0 +1 @@
+"""Init for module"""
diff --git a/cg/cli/workflow/nallo/base.py b/cg/cli/workflow/nallo/base.py
new file mode 100644
index 0000000000..4c724e8561
--- /dev/null
+++ b/cg/cli/workflow/nallo/base.py
@@ -0,0 +1,28 @@
+"""CLI support to create config and/or start NALLO."""
+
+import logging
+
+import rich_click as click
+
+from cg.cli.utils import CLICK_CONTEXT_SETTINGS
+
+from cg.cli.workflow.nf_analysis import config_case, run, start
+
+from cg.constants.constants import MetaApis
+from cg.meta.workflow.analysis import AnalysisAPI
+from cg.meta.workflow.nallo import NalloAnalysisAPI
+
+LOG = logging.getLogger(__name__)
+
+
+@click.group(invoke_without_command=True, context_settings=CLICK_CONTEXT_SETTINGS)
+@click.pass_context
+def nallo(context: click.Context) -> None:
+ """GMS/Nallo analysis workflow."""
+ AnalysisAPI.get_help(context)
+ context.obj.meta_apis[MetaApis.ANALYSIS_API] = NalloAnalysisAPI(config=context.obj)
+
+
+nallo.add_command(config_case)
+nallo.add_command(run)
+nallo.add_command(start)
diff --git a/cg/cli/workflow/nf_analysis.py b/cg/cli/workflow/nf_analysis.py
index 7011df85b2..655f3078b0 100644
--- a/cg/cli/workflow/nf_analysis.py
+++ b/cg/cli/workflow/nf_analysis.py
@@ -2,18 +2,19 @@
import logging
-import click
+import rich_click as click
from pydantic import ValidationError
from cg.cli.workflow.commands import ARGUMENT_CASE_ID
from cg.cli.workflow.utils import validate_force_store_option
-from cg.constants import EXIT_FAIL, EXIT_SUCCESS
+from cg.constants import EXIT_FAIL, EXIT_SUCCESS, Workflow
from cg.constants.cli_options import DRY_RUN, FORCE, COMMENT
from cg.constants.constants import MetaApis
from cg.exc import AnalysisNotReadyError, CgError, HousekeeperStoreError
from cg.meta.workflow.nf_analysis import NfAnalysisAPI
from cg.models.cg_config import CGConfig
+from cg.store.models import Case
LOG = logging.getLogger(__name__)
@@ -186,7 +187,9 @@ def start(
analysis_api: NfAnalysisAPI = context.meta_apis[MetaApis.ANALYSIS_API]
try:
analysis_api.status_db.verify_case_exists(case_internal_id=case_id)
- analysis_api.prepare_fastq_files(case_id=case_id, dry_run=dry_run)
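+ # Nallo cases do not use the FASTQ files staged by this step, so FASTQ
+ # preparation is skipped for them.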
+ case: Case = analysis_api.status_db.get_case_by_internal_id(case_id)
+ if case.data_analysis != Workflow.NALLO:
+ analysis_api.prepare_fastq_files(case_id=case_id, dry_run=dry_run)
analysis_api.config_case(case_id=case_id, dry_run=dry_run)
analysis_api.run_nextflow_analysis(
case_id=case_id,
diff --git a/cg/cli/workflow/raredisease/base.py b/cg/cli/workflow/raredisease/base.py
index 7910a8a3e0..172f225557 100644
--- a/cg/cli/workflow/raredisease/base.py
+++ b/cg/cli/workflow/raredisease/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS, echo_lines
from cg.cli.workflow.commands import ARGUMENT_CASE_ID, resolve_compression
diff --git a/cg/cli/workflow/raw_data/base.py b/cg/cli/workflow/raw_data/base.py
index a771b09014..490db36db7 100644
--- a/cg/cli/workflow/raw_data/base.py
+++ b/cg/cli/workflow/raw_data/base.py
@@ -1,6 +1,6 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.commands import ARGUMENT_CASE_ID
diff --git a/cg/cli/workflow/raw_data/raw_data_service.py b/cg/cli/workflow/raw_data/raw_data_service.py
index 55685b8ec3..b04648b29f 100644
--- a/cg/cli/workflow/raw_data/raw_data_service.py
+++ b/cg/cli/workflow/raw_data/raw_data_service.py
@@ -1,9 +1,10 @@
import datetime as dt
from cg.apps.tb.api import TrailblazerAPI
-from cg.constants.constants import AnalysisType, Workflow
-from cg.constants.tb import AnalysisStatus
+from cg.constants.constants import Workflow
+from cg.constants.tb import AnalysisStatus, AnalysisType
from cg.exc import CaseNotFoundError
+from cg.meta.workflow.utils.utils import MAP_TO_TRAILBLAZER_PRIORITY
from cg.store.models import Analysis, Case
from cg.store.store import Store
@@ -41,7 +42,7 @@ def _add_analysis_to_trailblazer(self, case: Case) -> None:
config_path="",
order_id=case.latest_order.id,
out_dir="",
- slurm_quality_of_service=case.slurm_priority,
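+ # Trailblazer now takes its own priority enum, mapped from the case
+ # priority rather than reusing the SLURM QOS value.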
+ priority=MAP_TO_TRAILBLAZER_PRIORITY[case.priority],
workflow=Workflow.RAW_DATA,
ticket=case.latest_ticket,
)
diff --git a/cg/cli/workflow/rnafusion/base.py b/cg/cli/workflow/rnafusion/base.py
index 27901d336e..18f9977feb 100644
--- a/cg/cli/workflow/rnafusion/base.py
+++ b/cg/cli/workflow/rnafusion/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.commands import resolve_compression
diff --git a/cg/cli/workflow/taxprofiler/base.py b/cg/cli/workflow/taxprofiler/base.py
index 14a6782d67..6c22a8b8bb 100644
--- a/cg/cli/workflow/taxprofiler/base.py
+++ b/cg/cli/workflow/taxprofiler/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.commands import resolve_compression
diff --git a/cg/cli/workflow/tomte/base.py b/cg/cli/workflow/tomte/base.py
index 4fc3a0c85c..0a0ab8a4e2 100644
--- a/cg/cli/workflow/tomte/base.py
+++ b/cg/cli/workflow/tomte/base.py
@@ -2,7 +2,7 @@
import logging
-import click
+import rich_click as click
from cg.cli.utils import CLICK_CONTEXT_SETTINGS
from cg.cli.workflow.commands import resolve_compression
diff --git a/cg/cli/workflow/utils.py b/cg/cli/workflow/utils.py
index 832266712e..bbcc0f6298 100644
--- a/cg/cli/workflow/utils.py
+++ b/cg/cli/workflow/utils.py
@@ -1,6 +1,6 @@
"""CLI utility methods."""
-import click
+import rich_click as click
def validate_force_store_option(force: bool, comment: str | None):
diff --git a/cg/clients/freshdesk/models.py b/cg/clients/freshdesk/models.py
index aa0be87ab4..0e082cb4cc 100644
--- a/cg/clients/freshdesk/models.py
+++ b/cg/clients/freshdesk/models.py
@@ -15,7 +15,7 @@ class TicketCreate(BaseModel):
name: str
priority: int = Priority.LOW
source: int = Source.EMAIL
- status: int = Status.OPEN
+ status: int = Status.PENDING
subject: str
tags: list[str] = []
type: str | None = None
diff --git a/cg/constants/__init__.py b/cg/constants/__init__.py
index 8901dbc8e6..4c13d30875 100644
--- a/cg/constants/__init__.py
+++ b/cg/constants/__init__.py
@@ -6,6 +6,7 @@
CAPTUREKIT_OPTIONS,
CONTAINER_OPTIONS,
DEFAULT_CAPTURE_KIT,
+ DNA_WORKFLOWS_WITH_SCOUT_UPLOAD,
STATUS_OPTIONS,
DataDelivery,
FileExtensions,
diff --git a/cg/constants/cli_options.py b/cg/constants/cli_options.py
index d6976797fe..d03116a35a 100644
--- a/cg/constants/cli_options.py
+++ b/cg/constants/cli_options.py
@@ -1,4 +1,4 @@
-import click
+import rich_click as click
DRY_RUN = click.option(
"--dry-run",
diff --git a/cg/constants/constants.py b/cg/constants/constants.py
index 210449a2c7..c2dd3e5842 100644
--- a/cg/constants/constants.py
+++ b/cg/constants/constants.py
@@ -86,14 +86,6 @@ def statuses(cls) -> list[str]:
return list(map(lambda status: status.value, cls))
-class AnalysisType(StrEnum):
- TARGETED_GENOME_SEQUENCING: str = "tgs"
- WHOLE_EXOME_SEQUENCING: str = "wes"
- WHOLE_GENOME_SEQUENCING: str = "wgs"
- WHOLE_TRANSCRIPTOME_SEQUENCING: str = "wts"
- OTHER: str = "other"
-
-
class CancerAnalysisType(StrEnum):
TUMOR_NORMAL = auto()
TUMOR_NORMAL_PANEL = auto()
@@ -131,6 +123,7 @@ class Workflow(StrEnum):
MIP_DNA: str = "mip-dna"
MIP_RNA: str = "mip-rna"
MUTANT: str = "mutant"
+ NALLO: str = "nallo"
RAREDISEASE: str = "raredisease"
RAW_DATA: str = "raw-data"
RNAFUSION: str = "rnafusion"
@@ -140,6 +133,13 @@ class Workflow(StrEnum):
TOMTE: str = "tomte"
+DNA_WORKFLOWS_WITH_SCOUT_UPLOAD: list[Workflow] = [
+ Workflow.MIP_DNA,
+ Workflow.BALSAMIC,
+ Workflow.BALSAMIC_UMI,
+]
+
+
class FileFormat(StrEnum):
CSV: str = "csv"
FASTQ: str = "fastq"
diff --git a/cg/constants/delivery.py b/cg/constants/delivery.py
index ec492f28f0..f914305e3e 100644
--- a/cg/constants/delivery.py
+++ b/cg/constants/delivery.py
@@ -144,7 +144,6 @@
]
MUTANT_ANALYSIS_SAMPLE_TAGS: list[set[str]] = [
- {"fastq"},
{"vcf", "vcf-report", "fohm-delivery"},
]
diff --git a/cg/constants/nf_analysis.py b/cg/constants/nf_analysis.py
index 77d53f3dff..87e7cd72b9 100644
--- a/cg/constants/nf_analysis.py
+++ b/cg/constants/nf_analysis.py
@@ -51,7 +51,7 @@ class NfTowerStatus(StrEnum):
memory = { 4.GB * task.attempt }
time = { 4.h * task.attempt }
cpus = 2
- ext.args = ' --data-format json '
+ ext.args = ' --data-format json --cl-config "max_table_rows: 10000" '
}
}
"""
diff --git a/cg/constants/orderforms.py b/cg/constants/orderforms.py
index 8c69f97073..fdd70bcd25 100644
--- a/cg/constants/orderforms.py
+++ b/cg/constants/orderforms.py
@@ -1,19 +1,11 @@
from enum import StrEnum
from cg.constants import ANALYSIS_SOURCES, METAGENOME_SOURCES
-from cg.models.orders.order import OrderType
SEX_MAP = {"male": "M", "female": "F", "unknown": "unknown"}
REV_SEX_MAP = {value: key for key, value in SEX_MAP.items()}
-CONTAINER_TYPES = ["Tube", "96 well plate"]
SOURCE_TYPES = set().union(METAGENOME_SOURCES, ANALYSIS_SOURCES)
-CASE_PROJECT_TYPES = [
- OrderType.MIP_DNA,
- OrderType.BALSAMIC,
- OrderType.MIP_RNA,
-]
-
class Orderform(StrEnum):
BALSAMIC: str = "1508"
@@ -36,10 +28,10 @@ class Orderform(StrEnum):
def get_current_orderform_version(order_form: str) -> str:
"""Returns the current version of the given order form."""
current_order_form_versions = {
- Orderform.MIP_DNA: "32",
+ Orderform.MIP_DNA: "33",
Orderform.RML: "19",
Orderform.MICROSALT: "11",
- Orderform.SARS_COV_2: "9",
+ Orderform.SARS_COV_2: "10",
Orderform.MICROBIAL_FASTQ: "1",
Orderform.PACBIO_LONG_READ: "1",
}
@@ -79,4 +71,5 @@ def get_current_orderform_version(order_form: str) -> str:
"LaboratorieMedicinskt Centrum Gotland": "621 84 Visby",
"Unilabs Eskilstuna Laboratorium": "631 88 Eskilstuna",
"Norrland University Hospital": "901 85 Umeå",
+ "Länssjukhuset Sundsvall": "856 43 Sundsvall",
}
diff --git a/cg/constants/priority.py b/cg/constants/priority.py
index fd9172af26..6c3600087f 100644
--- a/cg/constants/priority.py
+++ b/cg/constants/priority.py
@@ -11,6 +11,13 @@ class SlurmQos(StrEnum):
EXPRESS: str = "express"
+class TrailblazerPriority(StrEnum):
+ LOW: str = "low"
+ NORMAL: str = "normal"
+ HIGH: str = "high"
+ EXPRESS: str = "express"
+
+
class PriorityTerms(StrEnum):
EXPRESS: str = "express"
PRIORITY: str = "priority"
diff --git a/cg/constants/sequencing.py b/cg/constants/sequencing.py
index e22b9652ab..e3361f9821 100644
--- a/cg/constants/sequencing.py
+++ b/cg/constants/sequencing.py
@@ -72,3 +72,10 @@ class SeqLibraryPrepCategory(StrEnum):
WHOLE_EXOME_SEQUENCING: str = "wes"
WHOLE_GENOME_SEQUENCING: str = "wgs"
WHOLE_TRANSCRIPTOME_SEQUENCING: str = "wts"
+
+
+DNA_PREP_CATEGORIES: list[SeqLibraryPrepCategory] = [
+ SeqLibraryPrepCategory.WHOLE_GENOME_SEQUENCING,
+ SeqLibraryPrepCategory.TARGETED_GENOME_SEQUENCING,
+ SeqLibraryPrepCategory.WHOLE_EXOME_SEQUENCING,
+]
diff --git a/cg/constants/tb.py b/cg/constants/tb.py
index c52267f818..cec5bb528d 100644
--- a/cg/constants/tb.py
+++ b/cg/constants/tb.py
@@ -1,5 +1,7 @@
from enum import StrEnum
+from cg.constants.sequencing import SeqLibraryPrepCategory
+
class AnalysisStatus:
CANCELLED: str = "cancelled"
@@ -12,9 +14,9 @@ class AnalysisStatus:
QC: str = "qc"
-class AnalysisTypes(StrEnum):
- WGS: str = "wgs"
- WES: str = "wes"
- TGS: str = "tgs"
- WTS: str = "wts"
+class AnalysisType(StrEnum):
OTHER: str = "other"
+ TGS: str = SeqLibraryPrepCategory.TARGETED_GENOME_SEQUENCING
+ WES: str = SeqLibraryPrepCategory.WHOLE_EXOME_SEQUENCING
+ WGS: str = SeqLibraryPrepCategory.WHOLE_GENOME_SEQUENCING
+ WTS: str = SeqLibraryPrepCategory.WHOLE_TRANSCRIPTOME_SEQUENCING
diff --git a/cg/exc.py b/cg/exc.py
index 3e6e80ac27..0c3fab12d9 100644
--- a/cg/exc.py
+++ b/cg/exc.py
@@ -174,6 +174,12 @@ class OrderError(CgError):
"""
+class OrderSubmissionError(CgError):
+ """
+ Exception related to order submission.
+ """
+
+
class OrderFormError(CgError):
"""
Exception related to the order form.
@@ -298,10 +304,6 @@ class OrderNotFoundError(CgError):
"""Exception raised when an order is not found."""
-class OrderExistsError(CgError):
- """Exception raised when cases and samples are added to a pre-existing order."""
-
-
class OrderMismatchError(CgError):
"""Exception raised when cases expected to belong to the same order are not part of the same order."""
diff --git a/cg/meta/archive/archive.py b/cg/meta/archive/archive.py
index b744d5e0d6..553ab98bda 100644
--- a/cg/meta/archive/archive.py
+++ b/cg/meta/archive/archive.py
@@ -1,7 +1,7 @@
import logging
from typing import Callable, Type
-import click
+import rich_click as click
from housekeeper.store.models import Archive, File
from pydantic import BaseModel, ConfigDict
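
The click import swap repeated throughout this PR relies on rich_click being a drop-in wrapper around click: aliasing the import keeps all existing decorators and helpers working while --help output gains rich formatting. A minimal sketch with an illustrative command:

    import rich_click as click

    @click.command()
    @click.option("--case-id", required=True, help="Case to process.")
    def demo(case_id: str) -> None:
        """Illustrative command; only the import line differs from plain click."""
        click.echo(f"processing {case_id}")
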
diff --git a/cg/meta/delivery_report/balsamic.py b/cg/meta/delivery_report/balsamic.py
index 238d53ae28..dacc61b1d8 100644
--- a/cg/meta/delivery_report/balsamic.py
+++ b/cg/meta/delivery_report/balsamic.py
@@ -17,8 +17,8 @@
REQUIRED_SAMPLE_TIMESTAMP_FIELDS,
Workflow,
)
-from cg.constants.constants import AnalysisType
from cg.constants.scout import ScoutUploadKey
+from cg.constants.tb import AnalysisType
from cg.meta.delivery_report.data_validators import get_million_read_pairs
from cg.meta.delivery_report.delivery_report_api import DeliveryReportAPI
from cg.meta.workflow.balsamic import BalsamicAnalysisAPI
@@ -52,9 +52,7 @@ def get_sample_metadata(
passed_initial_qc: bool | None = self.lims_api.has_sample_passed_initial_qc(
sample.internal_id
)
- if AnalysisType.WHOLE_GENOME_SEQUENCING in self.analysis_api.get_data_analysis_type(
- case.internal_id
- ):
+ if AnalysisType.WGS in self.analysis_api.get_data_analysis_type(case.internal_id):
return self.get_wgs_metadata(
million_read_pairs=million_read_pairs,
passed_initial_qc=passed_initial_qc,
diff --git a/cg/meta/delivery_report/templates/macros/data_analysis/limitations.html b/cg/meta/delivery_report/templates/macros/data_analysis/limitations.html
index 2951ce14a6..a51504f03c 100644
--- a/cg/meta/delivery_report/templates/macros/data_analysis/limitations.html
+++ b/cg/meta/delivery_report/templates/macros/data_analysis/limitations.html
@@ -12,7 +12,7 @@
Teknisk beskrivning och begränsningar av analysen
{% for application in applications %}
- {{ application.tag }} (v{{ application.version }}) |
+ {{ application.tag }} |
{{ application.description }} |
{{ application.details }} |
diff --git a/cg/meta/delivery_report/templates/macros/order.html b/cg/meta/delivery_report/templates/macros/order.html
index 22b95144fc..419eff05de 100644
--- a/cg/meta/delivery_report/templates/macros/order.html
+++ b/cg/meta/delivery_report/templates/macros/order.html
@@ -31,7 +31,7 @@ Beställning
| {{ sample.name }} ({{ sample.id }}) |
{{ sample.timestamps.ordered_at }} |
{{ sample.ticket }} |
- {{ sample.application.tag }} (v{{ sample.application.version }}) |
+ {{ sample.application.tag }} |
{% if customer_workflow != "taxprofiler" %} {{ sample.sex }} {% endif %} |
{{ sample.source }} |
diff --git a/cg/meta/orders/__init__.py b/cg/meta/orders/__init__.py
deleted file mode 100644
index 4142bdd47b..0000000000
--- a/cg/meta/orders/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .api import OrdersAPI
diff --git a/cg/meta/orders/api.py b/cg/meta/orders/api.py
deleted file mode 100644
index 228a9570e0..0000000000
--- a/cg/meta/orders/api.py
+++ /dev/null
@@ -1,60 +0,0 @@
-"""Unified interface to handle sample submissions.
-
-This interface will update information in Status and/or LIMS as required.
-
-The normal entry for information is through the REST API which will pass a JSON
-document with all information about samples in the submission. The input will
-be validated and if passing all checks be accepted as new samples.
-"""
-
-import logging
-
-from cg.apps.lims import LimsAPI
-from cg.meta.orders.ticket_handler import TicketHandler
-from cg.models.orders.order import OrderIn, OrderType
-from cg.services.orders.submitters.order_submitter_registry import (
- OrderSubmitterRegistry,
-)
-from cg.store.store import Store
-
-LOG = logging.getLogger(__name__)
-
-
-class OrdersAPI:
- """Orders API for accepting new samples into the system."""
-
- def __init__(
- self,
- lims: LimsAPI,
- status: Store,
- ticket_handler: TicketHandler,
- submitter_registry: OrderSubmitterRegistry,
- ):
- super().__init__()
- self.lims = lims
- self.status = status
- self.ticket_handler = ticket_handler
- self.submitter_registry = submitter_registry
-
- def submit(self, project: OrderType, order_in: OrderIn, user_name: str, user_mail: str) -> dict:
- """Submit a batch of samples.
-
- Main entry point for the class towards interfaces that implements it.
- """
- submit_handler = self.submitter_registry.get_order_submitter(project)
- submit_handler.order_validation_service.validate_order(order_in)
- # detect manual ticket assignment
- ticket_number: str | None = self.ticket_handler.parse_ticket_number(order_in.name)
- if not ticket_number:
- ticket_number = self.ticket_handler.create_ticket(
- order=order_in, user_name=user_name, user_mail=user_mail, project=project
- )
- else:
- self.ticket_handler.connect_to_ticket(
- order=order_in,
- user_name=user_name,
- project=project,
- ticket_number=ticket_number,
- )
- order_in.ticket = ticket_number
- return submit_handler.submit_order(order_in=order_in)
diff --git a/cg/meta/upload/balsamic/balsamic.py b/cg/meta/upload/balsamic/balsamic.py
index c9ce637fba..f297a367bc 100644
--- a/cg/meta/upload/balsamic/balsamic.py
+++ b/cg/meta/upload/balsamic/balsamic.py
@@ -3,7 +3,7 @@
import datetime as dt
import logging
-import click
+import rich_click as click
from cg.apps.gens import GensAPI
from cg.cli.generate.delivery_report.base import generate_delivery_report
diff --git a/cg/meta/upload/fohm/fohm.py b/cg/meta/upload/fohm/fohm.py
index 8026debda8..f416777a48 100644
--- a/cg/meta/upload/fohm/fohm.py
+++ b/cg/meta/upload/fohm/fohm.py
@@ -3,22 +3,22 @@
import logging
import os
import re
-import shutil
from pathlib import Path
-
import paramiko
-from housekeeper.store.models import Version
-
from cg.apps.housekeeper.hk import HousekeeperAPI
from cg.apps.lims import LimsAPI
from cg.constants import FileExtensions
-from cg.constants.constants import SARS_COV_REGEX
+from cg.constants.constants import SARS_COV_REGEX, DataDelivery
from cg.constants.housekeeper_tags import FohmTag
from cg.exc import CgError
from cg.io.csv import read_csv, write_csv_from_dict
from cg.models.cg_config import CGConfig
from cg.models.email import EmailInfo
from cg.models.fohm.reports import FohmComplementaryReport, FohmPangolinReport
+from cg.services.deliver_files.constants import DeliveryDestination, DeliveryStructure
+from cg.services.deliver_files.factory import DeliveryServiceFactory
from cg.store.models import Case, Sample
from cg.store.store import Store
from cg.utils.dict import remove_duplicate_dicts
@@ -28,7 +28,12 @@
class FOHMUploadAPI:
- def __init__(self, config: CGConfig, dry_run: bool = False, datestr: str | None = None):
+ def __init__(
+ self,
+ config: CGConfig,
+ dry_run: bool = False,
+ datestr: str | None = None,
+ ):
self.config: CGConfig = config
self.housekeeper_api: HousekeeperAPI = config.housekeeper_api
self.lims_api: LimsAPI = config.lims_api
@@ -44,6 +49,7 @@ def __init__(self, config: CGConfig, dry_run: bool = False, datestr: str | None
self._reports_dataframe = None
self._pangolin_dataframe = None
self._aggregation_dataframe = None
+ self._delivery_factory: DeliveryServiceFactory = config.delivery_service_factory
@property
def current_datestr(self) -> str:
@@ -196,16 +202,16 @@ def link_sample_raw_data_files(
sample: Sample = self.status_db.get_sample_by_internal_id(
internal_id=report.internal_id
)
- bundle_name: str = sample.links[0].case.internal_id
- version: Version = self.housekeeper_api.last_version(bundle=bundle_name)
- files = self.housekeeper_api.files(version=version.id, tags={report.internal_id}).all()
- for file in files:
- if self._dry_run:
- LOG.info(
- f"Would have copied {file.full_path} to {Path(self.daily_rawdata_path)}"
- )
- continue
- shutil.copy(file.full_path, Path(self.daily_rawdata_path))
+ case: Case = sample.links[0].case
+ delivery_service = self._delivery_factory.build_delivery_service(
+ case=case,
+ delivery_type=DataDelivery.FASTQ_ANALYSIS,
+ delivery_destination=DeliveryDestination.FOHM,
+ delivery_structure=DeliveryStructure.FLAT,
+ )
+ delivery_service.deliver_files_for_sample_no_rsync(
+ case=case, sample_id=sample.internal_id, delivery_base_path=self.daily_rawdata_path
+ )
def create_pangolin_report(self, reports: list[FohmPangolinReport]) -> None:
LOG.info("Creating aggregate Pangolin report")
@@ -362,9 +368,13 @@ def parse_and_write_pangolin_report(self) -> list[FohmPangolinReport]:
self.create_pangolin_report(sars_cov_pangolin_reports)
return sars_cov_pangolin_reports
- def aggregate_delivery(self, cases: list[str]) -> None:
- """Aggregate and hardlink reports."""
- self.set_cases_to_aggregate(cases)
+ def aggregate_delivery(self, case_ids: list[str]) -> None:
+ """
+ Aggregate and hardlink reports.
+        Args:
+            case_ids: The internal IDs of the cases to aggregate.
+ """
+ self.set_cases_to_aggregate(case_ids)
self.create_daily_delivery_folders()
sars_cov_complementary_reports: list[FohmComplementaryReport] = (
self.parse_and_write_complementary_report()
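
The removed shutil.copy loop is replaced by the shared delivery-service abstraction, parameterized for a FOHM destination with a flat layout. A condensed sketch of the call shape used above; the inline notes are interpretations of the enum names, and delivery_factory/daily_rawdata_path stand in for the attributes on FOHMUploadAPI:

    delivery_service = delivery_factory.build_delivery_service(
        case=case,
        delivery_type=DataDelivery.FASTQ_ANALYSIS,      # fastq plus analysis files
        delivery_destination=DeliveryDestination.FOHM,  # FOHM, not the customer inbox
        delivery_structure=DeliveryStructure.FLAT,      # no per-case folder nesting
    )
    # Files are linked into the daily folder; sync_files_sftp ships them later.
    delivery_service.deliver_files_for_sample_no_rsync(
        case=case,
        sample_id=sample.internal_id,
        delivery_base_path=daily_rawdata_path,
    )
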
diff --git a/cg/meta/upload/microsalt/microsalt_upload_api.py b/cg/meta/upload/microsalt/microsalt_upload_api.py
index f2d35a1f24..9bc966d9ad 100644
--- a/cg/meta/upload/microsalt/microsalt_upload_api.py
+++ b/cg/meta/upload/microsalt/microsalt_upload_api.py
@@ -1,6 +1,6 @@
import logging
-import click
+import rich_click as click
from cg.meta.upload.upload_api import UploadAPI
diff --git a/cg/meta/upload/mip/mip_dna.py b/cg/meta/upload/mip/mip_dna.py
index 9b045b6448..423d9cf30f 100644
--- a/cg/meta/upload/mip/mip_dna.py
+++ b/cg/meta/upload/mip/mip_dna.py
@@ -3,7 +3,7 @@
import datetime as dt
import logging
-import click
+import rich_click as click
from cg.cli.generate.delivery_report.base import generate_delivery_report
from cg.cli.upload.coverage import upload_coverage
diff --git a/cg/meta/upload/mip/mip_rna.py b/cg/meta/upload/mip/mip_rna.py
index 8223242a5f..ddfacff544 100644
--- a/cg/meta/upload/mip/mip_rna.py
+++ b/cg/meta/upload/mip/mip_rna.py
@@ -3,7 +3,7 @@
import logging
from subprocess import CalledProcessError
-import click
+import rich_click as click
from cg.cli.upload.scout import upload_rna_to_scout
diff --git a/cg/meta/upload/mutant/mutant.py b/cg/meta/upload/mutant/mutant.py
new file mode 100644
index 0000000000..17d3760279
--- /dev/null
+++ b/cg/meta/upload/mutant/mutant.py
@@ -0,0 +1,28 @@
+from click import Context
+
+from cg.meta.upload.fohm.fohm import FOHMUploadAPI
+from cg.meta.upload.gisaid import GisaidAPI
+from cg.meta.upload.upload_api import UploadAPI
+from cg.meta.workflow.mutant import MutantAnalysisAPI
+from cg.models.cg_config import CGConfig
+from cg.store.models import Analysis, Case
+
+
+class MutantUploadAPI(UploadAPI):
+
+ def __init__(self, config: CGConfig):
+ self.analysis_api: MutantAnalysisAPI = MutantAnalysisAPI(config)
+ self.fohm_api = FOHMUploadAPI(config)
+        self.gisaid_api = GisaidAPI(config)
+
+ super().__init__(config=config, analysis_api=self.analysis_api)
+
+ def upload(self, ctx: Context, case: Case, restart: bool) -> None:
+ latest_analysis: Analysis = case.analyses[0]
+ self.update_upload_started_at(latest_analysis)
+ self.upload_files_to_customer_inbox(case)
+        self.gisaid_api.upload(case.internal_id)
+ self.fohm_api.aggregate_delivery(case_ids=[case.internal_id])
+ self.fohm_api.sync_files_sftp()
+ self.fohm_api.send_mail_reports()
+ self.update_uploaded_at(latest_analysis)
diff --git a/cg/meta/upload/nf_analysis.py b/cg/meta/upload/nf_analysis.py
index 11da3a7ee8..a9e90fef56 100644
--- a/cg/meta/upload/nf_analysis.py
+++ b/cg/meta/upload/nf_analysis.py
@@ -3,7 +3,7 @@
import datetime as dt
import logging
-import click
+import rich_click as click
from cg.cli.generate.delivery_report.base import generate_delivery_report
from cg.cli.upload.scout import upload_to_scout
diff --git a/cg/meta/upload/raredisease/raredisease.py b/cg/meta/upload/raredisease/raredisease.py
index 3646daed11..17bea8738b 100644
--- a/cg/meta/upload/raredisease/raredisease.py
+++ b/cg/meta/upload/raredisease/raredisease.py
@@ -3,7 +3,7 @@
import datetime as dt
import logging
-import click
+import rich_click as click
from cg.cli.generate.delivery_report.base import generate_delivery_report
from cg.cli.upload.genotype import upload_genotypes
diff --git a/cg/meta/upload/scout/balsamic_config_builder.py b/cg/meta/upload/scout/balsamic_config_builder.py
index 61f8a34b3a..ac90858420 100644
--- a/cg/meta/upload/scout/balsamic_config_builder.py
+++ b/cg/meta/upload/scout/balsamic_config_builder.py
@@ -5,12 +5,7 @@
from cg.apps.lims import LimsAPI
from cg.constants.constants import SampleType
from cg.constants.housekeeper_tags import HK_DELIVERY_REPORT_TAG
-from cg.constants.scout import (
- BALSAMIC_CASE_TAGS,
- BALSAMIC_SAMPLE_TAGS,
- GenomeBuild,
- UploadTrack,
-)
+from cg.constants.scout import BALSAMIC_CASE_TAGS, BALSAMIC_SAMPLE_TAGS, GenomeBuild, UploadTrack
from cg.constants.subject import PhenotypeStatus
from cg.meta.upload.scout.hk_tags import CaseTags, SampleTags
from cg.meta.upload.scout.scout_config_builder import ScoutConfigBuilder
@@ -71,7 +66,7 @@ def build_config_sample(self, case_sample: CaseSample) -> ScoutCancerIndividual:
def get_balsamic_analysis_type(self, sample: Sample) -> str:
"""Returns a formatted balsamic analysis type"""
- analysis_type: str = BalsamicAnalysisAPI.get_application_type(sample_obj=sample)
+ analysis_type: str = BalsamicAnalysisAPI.get_analysis_type(sample=sample)
if analysis_type == "tgs":
analysis_type = "panel"
if analysis_type == "wgs":
diff --git a/cg/meta/upload/scout/uploadscoutapi.py b/cg/meta/upload/scout/uploadscoutapi.py
index 17ec5b4492..f08a75e2f1 100644
--- a/cg/meta/upload/scout/uploadscoutapi.py
+++ b/cg/meta/upload/scout/uploadscoutapi.py
@@ -398,7 +398,10 @@ def upload_omics_sample_id_to_scout(
)
def upload_rna_fraser_outrider_to_scout(
- self, dry_run: bool, case_id: str, rna_dna_collections: list[RNADNACollection], cust_id: str
+ self,
+ dry_run: bool,
+ case_id: str,
+ rna_dna_collections: list[RNADNACollection],
) -> None:
"""Upload omics fraser and outrider file for a case to Scout."""
status_db: Store = self.status_db
@@ -430,7 +433,7 @@ def upload_rna_fraser_outrider_to_scout(
outrider_file_path=rna_outrider.full_path,
case_id=dna_case_id,
customer_case_name=customer_case.name,
- cust_id=cust_id,
+ cust_id=customer_case.customer.internal_id,
)
for upload_statement in self.get_rna_fraser_outrider_upload_summary(rna_dna_collections):
LOG.info(upload_statement)
@@ -441,7 +444,6 @@ def upload_rna_genome_build_to_scout(
dry_run: bool,
rna_case: str,
rna_dna_collections: list[RNADNACollection],
- cust_id: str,
) -> None:
"""Upload RNA genome built for a RNA/DNA case to Scout."""
status_db: Store = self.status_db
@@ -463,7 +465,7 @@ def upload_rna_genome_build_to_scout(
self.scout_api.upload_rna_genome_build(
case_id=dna_case_id,
customer_case_name=customer_case.name,
- cust_id=cust_id,
+ cust_id=customer_case.customer.internal_id,
rna_genome_build=rna_genome_build,
)
@@ -614,7 +616,6 @@ def upload_rna_omics_to_scout(self, dry_run: bool, case_id: str) -> None:
status_db: Store = self.status_db
rna_case = status_db.get_case_by_internal_id(case_id)
rna_dna_collections: list[RNADNACollection] = self.create_rna_dna_collections(rna_case)
- cust_id: str = rna_case.customer.internal_id
self.upload_omics_sample_id_to_scout(
dry_run=dry_run, rna_dna_collections=rna_dna_collections
)
@@ -622,13 +623,11 @@ def upload_rna_omics_to_scout(self, dry_run: bool, case_id: str) -> None:
dry_run=dry_run,
case_id=case_id,
rna_dna_collections=rna_dna_collections,
- cust_id=cust_id,
)
self.upload_rna_genome_build_to_scout(
dry_run=dry_run,
rna_case=rna_case,
rna_dna_collections=rna_dna_collections,
- cust_id=cust_id,
)
self.load_rna_variant_outlier_to_scout(
dry_run=dry_run, rna_dna_collections=rna_dna_collections
diff --git a/cg/meta/upload/tomte/tomte.py b/cg/meta/upload/tomte/tomte.py
index 57cad6fbc4..f676c93dc5 100644
--- a/cg/meta/upload/tomte/tomte.py
+++ b/cg/meta/upload/tomte/tomte.py
@@ -3,7 +3,7 @@
import logging
from subprocess import CalledProcessError
-import click
+import rich_click as click
from cg.cli.generate.delivery_report.base import generate_delivery_report
from cg.cli.upload.scout import upload_tomte_to_scout
diff --git a/cg/meta/upload/upload_api.py b/cg/meta/upload/upload_api.py
index 4593395ad1..ce52c9d913 100644
--- a/cg/meta/upload/upload_api.py
+++ b/cg/meta/upload/upload_api.py
@@ -4,7 +4,7 @@
from datetime import datetime, timedelta
from pathlib import Path
-import click
+import rich_click as click
from cg.exc import AnalysisAlreadyUploadedError, AnalysisUploadError
from cg.meta.meta import MetaAPI
@@ -15,7 +15,7 @@
from cg.services.deliver_files.deliver_files_service.deliver_files_service import (
DeliverFilesService,
)
-from cg.services.deliver_files.deliver_files_service.deliver_files_service_factory import (
+from cg.services.deliver_files.factory import (
DeliveryServiceFactory,
)
from cg.store.models import Analysis, Case
@@ -97,8 +97,7 @@ def upload_files_to_customer_inbox(self, case: Case) -> None:
"""Uploads the analysis files to the customer inbox."""
factory_service: DeliveryServiceFactory = self.config.delivery_service_factory
delivery_service: DeliverFilesService = factory_service.build_delivery_service(
- case=case,
- delivery_type=case.data_delivery,
+ case=case, delivery_type=case.data_delivery
)
delivery_service.deliver_files_for_case(
case=case, delivery_base_path=Path(self.config.delivery_path)
diff --git a/cg/meta/workflow/analysis.py b/cg/meta/workflow/analysis.py
index d5516895b0..409ca58b9f 100644
--- a/cg/meta/workflow/analysis.py
+++ b/cg/meta/workflow/analysis.py
@@ -6,14 +6,13 @@
from subprocess import CalledProcessError
from typing import Iterator
-import click
+import rich_click as click
from housekeeper.store.models import Bundle, Version
from cg.apps.environ import environ_email
from cg.clients.chanjo2.models import CoverageMetrics
from cg.constants import EXIT_FAIL, EXIT_SUCCESS, Priority, SequencingFileTag, Workflow
from cg.constants.constants import (
- AnalysisType,
CaseActions,
CustomerId,
FileFormat,
@@ -21,15 +20,16 @@
WorkflowManager,
)
from cg.constants.gene_panel import GenePanelCombo, GenePanelMasterList
-from cg.constants.priority import SlurmQos
+from cg.constants.priority import SlurmQos, TrailblazerPriority
from cg.constants.scout import HGNC_ID, ScoutExportFileName
-from cg.constants.tb import AnalysisStatus
+from cg.constants.sequencing import SeqLibraryPrepCategory
+from cg.constants.tb import AnalysisStatus, AnalysisType
from cg.exc import AnalysisNotReadyError, BundleAlreadyAddedError, CgDataError, CgError
from cg.io.controller import WriteFile
from cg.meta.archive.archive import SpringArchiveAPI
from cg.meta.meta import MetaAPI
from cg.meta.workflow.fastq import FastqHandler
-from cg.meta.workflow.utils.utils import are_all_samples_control
+from cg.meta.workflow.utils.utils import are_all_samples_control, MAP_TO_TRAILBLAZER_PRIORITY
from cg.models.analysis import AnalysisModel
from cg.models.cg_config import CGConfig
from cg.models.fastq import FastqFileMeta
@@ -137,6 +137,12 @@ def get_slurm_qos_for_case(self, case_id: str) -> str:
priority: int = case.priority or Priority.research
return Priority.priority_to_slurm_qos().get(priority)
+    def get_trailblazer_priority(self, case_id: str) -> TrailblazerPriority:
+ """Get the priority for the case in Trailblazer."""
+ case: Case = self.status_db.get_case_by_internal_id(internal_id=case_id)
+ priority: int = case.priority
+ return MAP_TO_TRAILBLAZER_PRIORITY[priority]
+
def get_workflow_manager(self) -> str:
"""Get workflow manager for a given workflow."""
return WorkflowManager.Slurm.value
@@ -168,17 +174,18 @@ def get_bundle_deliverables_type(self, case_id: str) -> str | None:
return None
@staticmethod
- def get_application_type(sample_obj: Sample) -> str:
+ def get_analysis_type(sample: Sample) -> str:
"""
- Gets application type for sample. Only application types supported by trailblazer (or other)
+        Return the analysis type for a sample.
+ Only analysis types supported by Trailblazer
are valid outputs.
"""
- prep_category: str = sample_obj.prep_category
+ prep_category: str = sample.prep_category
if prep_category and prep_category.lower() in {
- AnalysisType.TARGETED_GENOME_SEQUENCING,
- AnalysisType.WHOLE_EXOME_SEQUENCING,
- AnalysisType.WHOLE_GENOME_SEQUENCING,
- AnalysisType.WHOLE_TRANSCRIPTOME_SEQUENCING,
+ SeqLibraryPrepCategory.TARGETED_GENOME_SEQUENCING,
+ SeqLibraryPrepCategory.WHOLE_EXOME_SEQUENCING,
+ SeqLibraryPrepCategory.WHOLE_GENOME_SEQUENCING,
+ SeqLibraryPrepCategory.WHOLE_TRANSCRIPTOME_SEQUENCING,
}:
return prep_category.lower()
return AnalysisType.OTHER
@@ -186,7 +193,7 @@ def get_application_type(sample_obj: Sample) -> str:
def get_case_application_type(self, case_id: str) -> str:
"""Returns the application type for samples in a case."""
samples: list[Sample] = self.status_db.get_samples_by_case_id(case_id)
- application_types: set[str] = {self.get_application_type(sample) for sample in samples}
+ application_types: set[str] = {self.get_analysis_type(sample) for sample in samples}
if len(application_types) > 1:
raise CgError(
@@ -197,7 +204,7 @@ def get_case_application_type(self, case_id: str) -> str:
def are_case_samples_rna(self, case_id: str) -> bool:
analysis_type: str = self.get_case_application_type(case_id)
- return analysis_type == AnalysisType.WHOLE_TRANSCRIPTOME_SEQUENCING
+ return analysis_type == AnalysisType.WTS
def get_case_source_type(self, case_id: str) -> str | None:
"""
@@ -219,7 +226,7 @@ def get_case_source_type(self, case_id: str) -> str | None:
def has_case_only_exome_samples(self, case_id: str) -> bool:
"""Returns True if the application type for all samples in a case is WHOLE_EXOME_SEQUENCING."""
application_type: str = self.get_case_application_type(case_id)
- return application_type == AnalysisType.WHOLE_EXOME_SEQUENCING
+ return application_type == AnalysisType.WES
def upload_bundle_housekeeper(
self, case_id: str, dry_run: bool = False, force: bool = False
@@ -284,26 +291,26 @@ def add_pending_trailblazer_analysis(
tower_workflow_id: str | None = None,
) -> None:
self.check_analysis_ongoing(case_id)
- application_type: str = self.get_application_type(
+ analysis_type: str = self.get_analysis_type(
self.status_db.get_case_by_internal_id(case_id).links[0].sample
)
config_path: str = self.get_job_ids_path(case_id).as_posix()
email: str = environ_email()
order_id: int = self._get_order_id_from_case_id(case_id)
out_dir: str = self.get_job_ids_path(case_id).parent.as_posix()
- slurm_quality_of_service: str = self.get_slurm_qos_for_case(case_id)
+ priority: TrailblazerPriority = self.get_trailblazer_priority(case_id)
ticket: str = self.status_db.get_latest_ticket_from_case(case_id)
workflow: Workflow = self.workflow
workflow_manager: str = self.get_workflow_manager()
is_case_for_development: bool = self._is_case_for_development(case_id)
self.trailblazer_api.add_pending_analysis(
- analysis_type=application_type,
+ analysis_type=analysis_type,
case_id=case_id,
config_path=config_path,
email=email,
order_id=order_id,
out_dir=out_dir,
- slurm_quality_of_service=slurm_quality_of_service,
+ priority=priority,
ticket=ticket,
workflow=workflow,
workflow_manager=workflow_manager,
diff --git a/cg/meta/workflow/balsamic.py b/cg/meta/workflow/balsamic.py
index 2c1b465db2..bbef5c0faf 100644
--- a/cg/meta/workflow/balsamic.py
+++ b/cg/meta/workflow/balsamic.py
@@ -134,7 +134,7 @@ def get_bundle_deliverables_type(self, case_id: str) -> str:
self.status_db.get_case_by_internal_id(internal_id=case_id).links
)
- application_type: str = self.get_application_type(
+ application_type: str = self.get_analysis_type(
self.status_db.get_case_by_internal_id(internal_id=case_id).links[0].sample
)
sample_type = "tumor"
@@ -500,7 +500,7 @@ def get_sample_params(self, case_id: str, panel_bed: str | None) -> dict:
link_object.sample.internal_id: {
"sex": link_object.sample.sex,
"tissue_type": self.get_sample_type(link_object.sample).value,
- "application_type": self.get_application_type(link_object.sample),
+ "application_type": self.get_analysis_type(link_object.sample),
"target_bed": self.resolve_target_bed(panel_bed=panel_bed, link_object=link_object),
}
for link_object in self.status_db.get_case_by_internal_id(internal_id=case_id).links
@@ -512,7 +512,7 @@ def get_sample_params(self, case_id: str, panel_bed: str | None) -> dict:
def resolve_target_bed(self, panel_bed: str | None, link_object: CaseSample) -> str | None:
if panel_bed:
return panel_bed
- if self.get_application_type(link_object.sample) not in self.__BALSAMIC_BED_APPLICATIONS:
+ if self.get_analysis_type(link_object.sample) not in self.__BALSAMIC_BED_APPLICATIONS:
return None
return self.get_target_bed_from_lims(link_object.case.internal_id)
diff --git a/cg/meta/workflow/microsalt/microsalt.py b/cg/meta/workflow/microsalt/microsalt.py
index 59f191d485..c3cd228bed 100644
--- a/cg/meta/workflow/microsalt/microsalt.py
+++ b/cg/meta/workflow/microsalt/microsalt.py
@@ -5,7 +5,7 @@
from pathlib import Path
from typing import Any
-import click
+import rich_click as click
from cg.constants import EXIT_FAIL, EXIT_SUCCESS, Priority, Workflow
from cg.constants.constants import FileExtensions
diff --git a/cg/meta/workflow/mip_dna.py b/cg/meta/workflow/mip_dna.py
index c5090509d7..88961c1734 100644
--- a/cg/meta/workflow/mip_dna.py
+++ b/cg/meta/workflow/mip_dna.py
@@ -1,10 +1,10 @@
import logging
from cg.constants import DEFAULT_CAPTURE_KIT, Workflow
-from cg.constants.constants import AnalysisType
from cg.constants.gene_panel import GENOME_BUILD_37
from cg.constants.pedigree import Pedigree
from cg.constants.scout import MIP_CASE_TAGS
+from cg.constants.tb import AnalysisType
from cg.meta.workflow.mip import MipAnalysisAPI
from cg.models.cg_config import CGConfig
from cg.models.mip.mip_analysis import MipAnalysis
@@ -54,7 +54,7 @@ def config_sample(
) -> dict[str, str | int | None]:
"""Return config sample data."""
sample_data: dict[str, str | int] = self.get_sample_data(link_obj=link_obj)
- if sample_data["analysis_type"] == AnalysisType.WHOLE_GENOME_SEQUENCING:
+ if sample_data["analysis_type"] == AnalysisType.WGS:
sample_data["capture_kit"]: str = panel_bed or DEFAULT_CAPTURE_KIT
else:
sample_data["capture_kit"]: str | None = panel_bed or self.get_target_bed_from_lims(
@@ -90,10 +90,8 @@ def get_data_analysis_type(self, case_id: str) -> str:
link.sample.application_version.application.analysis_type for link in case.links
}
if len(analysis_types) > 1:
- LOG.warning(
- f"Multiple analysis types found. Defaulting to {AnalysisType.WHOLE_GENOME_SEQUENCING}."
- )
- return AnalysisType.WHOLE_GENOME_SEQUENCING
+ LOG.warning(f"Multiple analysis types found. Defaulting to {AnalysisType.WGS}.")
+ return AnalysisType.WGS
return analysis_types.pop() if analysis_types else None
def get_scout_upload_case_tags(self) -> dict:
diff --git a/cg/meta/workflow/nallo.py b/cg/meta/workflow/nallo.py
new file mode 100644
index 0000000000..f53b431b04
--- /dev/null
+++ b/cg/meta/workflow/nallo.py
@@ -0,0 +1,93 @@
+"""Module for Nallo Analysis API."""
+
+import logging
+from cg.constants import Workflow
+from cg.constants.subject import PlinkPhenotypeStatus, PlinkSex
+from cg.meta.workflow.nf_analysis import NfAnalysisAPI
+from cg.models.cg_config import CGConfig
+from cg.models.nallo.nallo import NalloSampleSheetHeaders, NalloSampleSheetEntry, NalloParameters
+from cg.store.models import CaseSample
+from pathlib import Path
+
+LOG = logging.getLogger(__name__)
+
+
+class NalloAnalysisAPI(NfAnalysisAPI):
+ """Handles communication between Nallo processes
+ and the rest of CG infrastructure."""
+
+ def __init__(
+ self,
+ config: CGConfig,
+ workflow: Workflow = Workflow.NALLO,
+ ):
+ super().__init__(config=config, workflow=workflow)
+ self.root_dir: str = config.nallo.root
+ self.workflow_bin_path: str = config.nallo.workflow_bin_path
+ self.profile: str = config.nallo.profile
+ self.conda_env: str = config.nallo.conda_env
+ self.conda_binary: str = config.nallo.conda_binary
+ self.platform: str = config.nallo.platform
+ self.params: str = config.nallo.params
+ self.workflow_config_path: str = config.nallo.config
+ self.resources: str = config.nallo.resources
+ self.tower_binary_path: str = config.tower_binary_path
+ self.tower_workflow: str = config.nallo.tower_workflow
+ self.account: str = config.nallo.slurm.account
+ self.email: str = config.nallo.slurm.mail_user
+ self.compute_env_base: str = config.nallo.compute_env
+ self.revision: str = config.nallo.revision
+ self.nextflow_binary_path: str = config.nallo.binary_path
+
+ @property
+ def sample_sheet_headers(self) -> list[str]:
+ """Headers for sample sheet."""
+ return NalloSampleSheetHeaders.list()
+
+ def get_sample_sheet_content_per_sample(self, case_sample: CaseSample) -> list[list[str]]:
+ """Collect and format information required to build a sample sheet for a single sample."""
+ read_file_paths = self.get_bam_read_file_paths(sample=case_sample.sample)
+ sample_sheet_entries = []
+
+ for bam_path in read_file_paths:
+ sample_sheet_entry = NalloSampleSheetEntry(
+ project=case_sample.case.internal_id,
+ sample=case_sample.sample.internal_id,
+ read_file=Path(bam_path),
+ family_id=case_sample.case.internal_id,
+ paternal_id=case_sample.get_paternal_sample_id or "0",
+ maternal_id=case_sample.get_maternal_sample_id or "0",
+ sex=self.get_sex_code(case_sample.sample.sex),
+ phenotype=self.get_phenotype_code(case_sample.status),
+ )
+ sample_sheet_entries.extend(sample_sheet_entry.reformat_sample_content)
+ return sample_sheet_entries
+
+ @staticmethod
+ def get_phenotype_code(phenotype: str) -> int:
+ """Return Nallo phenotype code."""
+ LOG.debug("Translate phenotype to integer code")
+ try:
+ code = PlinkPhenotypeStatus[phenotype.upper()]
+ except KeyError:
+ raise ValueError(f"{phenotype} is not a valid phenotype")
+ return code
+
+ @staticmethod
+ def get_sex_code(sex: str) -> int:
+ """Return Nallo sex code."""
+ LOG.debug("Translate sex to integer code")
+ try:
+ code = PlinkSex[sex.upper()]
+ except KeyError:
+ raise ValueError(f"{sex} is not a valid sex")
+ return code
+
+ def get_built_workflow_parameters(self, case_id: str) -> NalloParameters:
+ """Return parameters."""
+ outdir = self.get_case_path(case_id=case_id)
+
+ return NalloParameters(
+ input=self.get_sample_sheet_path(case_id=case_id),
+ outdir=outdir,
+ )
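
For reference, the sample-sheet row emitted per BAM file follows the NalloSampleSheetHeaders column order; the identifiers below are hypothetical, and the integer codes follow PLINK conventions (sex: 1 male, 2 female; phenotype: 1 unaffected, 2 affected):

    # project, sample, file, family_id, paternal_id, maternal_id, sex, phenotype
    row = [
        "case_id",          # project: the case internal id
        "child_sample",     # sample internal id
        "/path/child.bam",  # read_file, one row per BAM
        "case_id",          # family_id: same as project
        "father_sample",    # paternal_id, "0" when absent
        "mother_sample",    # maternal_id, "0" when absent
        1,                  # sex code from PlinkSex
        2,                  # phenotype code from PlinkPhenotypeStatus
    ]
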
diff --git a/cg/meta/workflow/nf_analysis.py b/cg/meta/workflow/nf_analysis.py
index 7677b8397e..a0acdf0bf6 100644
--- a/cg/meta/workflow/nf_analysis.py
+++ b/cg/meta/workflow/nf_analysis.py
@@ -18,6 +18,7 @@
WorkflowManager,
)
from cg.constants.gene_panel import GenePanelGenomeBuild
+from cg.constants.housekeeper_tags import AlignmentFileTag
from cg.constants.nextflow import NFX_WORK_DIR
from cg.constants.nf_analysis import NfTowerStatus
from cg.constants.tb import AnalysisStatus
@@ -63,7 +64,7 @@ def __init__(self, config: CGConfig, workflow: Workflow):
self.conda_binary: str | None = None
self.platform: str | None = None
self.params: str | None = None
- self.config: str | None = None
+ self.workflow_config_path: str | None = None
self.resources: str | None = None
self.tower_binary_path: str | None = None
self.tower_workflow: str | None = None
@@ -136,7 +137,7 @@ def get_nextflow_config_content(self, case_id: str) -> str:
"""Return nextflow config content."""
config_files_list: list[str] = [
self.platform,
- self.config,
+ self.workflow_config_path,
self.resources,
]
extra_parameters_str: list[str] = [
@@ -252,6 +253,15 @@ def get_paired_read_paths(self, sample=Sample) -> tuple[list[str], list[str]]:
)
return fastq_forward_read_paths, fastq_reverse_read_paths
+    def get_bam_read_file_paths(self, sample: Sample) -> list[Path]:
+        """Gather BAM file paths for a sample based on the BAM tag."""
+ return [
+ Path(hk_file.full_path)
+ for hk_file in self.housekeeper_api.files(
+ bundle=sample.internal_id, tags={AlignmentFileTag.BAM}
+ )
+ ]
+
def get_sample_sheet_content_per_sample(self, case_sample: CaseSample) -> list[list[str]]:
"""Collect and format information required to build a sample sheet for a single sample."""
raise NotImplementedError
diff --git a/cg/meta/workflow/raredisease.py b/cg/meta/workflow/raredisease.py
index 35186e082a..694c78a574 100644
--- a/cg/meta/workflow/raredisease.py
+++ b/cg/meta/workflow/raredisease.py
@@ -14,7 +14,7 @@
CoverageSample,
)
from cg.constants import DEFAULT_CAPTURE_KIT, Workflow
-from cg.constants.constants import AnalysisType, GenomeVersion
+from cg.constants.constants import GenomeVersion
from cg.constants.nf_analysis import (
RAREDISEASE_COVERAGE_FILE_TAGS,
RAREDISEASE_COVERAGE_INTERVAL_TYPE,
@@ -24,6 +24,7 @@
)
from cg.constants.scout import RAREDISEASE_CASE_TAGS, ScoutExportFileName
from cg.constants.subject import PlinkPhenotypeStatus, PlinkSex
+from cg.constants.tb import AnalysisType
from cg.meta.workflow.nf_analysis import NfAnalysisAPI
from cg.models.cg_config import CGConfig
from cg.models.deliverables.metric_deliverables import MetricsBase, MultiqcDataJson
@@ -55,7 +56,7 @@ def __init__(
self.conda_binary: str = config.raredisease.conda_binary
self.platform: str = config.raredisease.platform
self.params: str = config.raredisease.params
- self.config: str = config.raredisease.config
+ self.workflow_config_path: str = config.raredisease.config
self.resources: str = config.raredisease.resources
self.tower_binary_path: str = config.tower_binary_path
self.tower_workflow: str = config.raredisease.tower_workflow
@@ -98,13 +99,13 @@ def get_target_bed(self, case_id: str, analysis_type: str) -> str:
"""
target_bed_file: str = self.get_target_bed_from_lims(case_id=case_id)
if not target_bed_file:
- if analysis_type == AnalysisType.WHOLE_GENOME_SEQUENCING:
+ if analysis_type == AnalysisType.WGS:
return DEFAULT_CAPTURE_KIT
raise ValueError("No capture kit was found in LIMS")
return target_bed_file
def get_germlinecnvcaller_flag(self, analysis_type: str) -> bool:
- if analysis_type == AnalysisType.WHOLE_GENOME_SEQUENCING:
+ if analysis_type == AnalysisType.WGS:
return True
return False
diff --git a/cg/meta/workflow/rnafusion.py b/cg/meta/workflow/rnafusion.py
index 14c3da4afe..1df69e55ef 100644
--- a/cg/meta/workflow/rnafusion.py
+++ b/cg/meta/workflow/rnafusion.py
@@ -35,7 +35,7 @@ def __init__(
self.conda_binary: str = config.rnafusion.conda_binary
self.platform: str = config.rnafusion.platform
self.params: str = config.rnafusion.params
- self.config: str = config.rnafusion.config
+ self.workflow_config_path: str = config.rnafusion.config
self.resources: str = config.rnafusion.resources
self.tower_binary_path: str = config.tower_binary_path
self.tower_workflow: str = config.rnafusion.tower_workflow
diff --git a/cg/meta/workflow/tomte.py b/cg/meta/workflow/tomte.py
index 9eb4aa7df6..2b1204c5ec 100644
--- a/cg/meta/workflow/tomte.py
+++ b/cg/meta/workflow/tomte.py
@@ -31,7 +31,7 @@ def __init__(
self.conda_binary: str = config.tomte.conda_binary
self.platform: str = config.tomte.platform
self.params: str = config.tomte.params
- self.config: str = config.tomte.config
+ self.workflow_config_path: str = config.tomte.config
self.resources: str = config.tomte.resources
self.tower_binary_path: str = config.tower_binary_path
self.tower_workflow: str = config.tomte.tower_workflow
diff --git a/cg/meta/workflow/utils/utils.py b/cg/meta/workflow/utils/utils.py
index a42753f210..fb2a08c00b 100644
--- a/cg/meta/workflow/utils/utils.py
+++ b/cg/meta/workflow/utils/utils.py
@@ -1,4 +1,5 @@
from cg.constants.constants import ControlOptions
+from cg.constants.priority import TrailblazerPriority
from cg.store.models import Case
@@ -9,3 +10,12 @@ def are_all_samples_control(case: Case) -> bool:
sample.control in [ControlOptions.NEGATIVE, ControlOptions.POSITIVE]
for sample in case.samples
)
+
+
+MAP_TO_TRAILBLAZER_PRIORITY: dict[int, TrailblazerPriority] = {
+ 0: TrailblazerPriority.LOW,
+ 1: TrailblazerPriority.NORMAL,
+ 2: TrailblazerPriority.HIGH,
+ 3: TrailblazerPriority.EXPRESS,
+ 4: TrailblazerPriority.NORMAL,
+}
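
The integer keys are StatusDB case priorities (the member names live in cg.constants.priority and are assumed here); note that both key 1 and the top key map to NORMAL, so the highest tier is deliberately not escalated in Trailblazer. A quick check of the lookup used by get_trailblazer_priority:

    from cg.constants.priority import TrailblazerPriority
    from cg.meta.workflow.utils.utils import MAP_TO_TRAILBLAZER_PRIORITY

    case_priority = 3  # e.g. an express case in StatusDB
    assert MAP_TO_TRAILBLAZER_PRIORITY[case_priority] is TrailblazerPriority.EXPRESS
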
diff --git a/cg/models/cg_config.py b/cg/models/cg_config.py
index 77af8d2181..803c7d0d9d 100644
--- a/cg/models/cg_config.py
+++ b/cg/models/cg_config.py
@@ -27,9 +27,7 @@
from cg.meta.delivery.delivery import DeliveryAPI
from cg.services.analysis_service.analysis_service import AnalysisService
from cg.services.decompression_service.decompressor import Decompressor
-from cg.services.deliver_files.deliver_files_service.deliver_files_service_factory import (
- DeliveryServiceFactory,
-)
+from cg.services.deliver_files.factory import DeliveryServiceFactory
from cg.services.deliver_files.rsync.models import RsyncDeliveryConfig
from cg.services.deliver_files.rsync.service import DeliveryRsyncService
from cg.services.fastq_concatenation_service.fastq_concatenation_service import (
@@ -144,6 +142,11 @@ class StatinaConfig(BaseModel):
class CommonAppConfig(BaseModel):
binary_path: str | None = None
config_path: str | None = None
+ container_mount_volume: str | None = None
+
+
+class HermesConfig(CommonAppConfig):
+ container_path: str
class FluffyUploadConfig(BaseModel):
@@ -210,6 +213,24 @@ class MipConfig(BaseModel):
script: str
+class NalloConfig(CommonAppConfig):
+ binary_path: str | None = None
+ compute_env: str
+ conda_binary: str | None = None
+ conda_env: str
+ platform: str
+ params: str
+ config: str
+ resources: str
+ launch_directory: str
+ workflow_bin_path: str
+ profile: str
+ revision: str
+ root: str
+ slurm: SlurmConfig
+ tower_workflow: str
+
+
class RarediseaseConfig(CommonAppConfig):
binary_path: str | None = None
compute_env: str
@@ -406,7 +427,7 @@ class CGConfig(BaseModel):
genotype_api_: GenotypeAPI = None
gens: CommonAppConfig = None
gens_api_: GensAPI = None
- hermes: CommonAppConfig = None
+ hermes: HermesConfig = None
hermes_api_: HermesApi = None
janus: ClientConfig | None = None
janus_api_: JanusAPIClient | None = None
@@ -441,6 +462,7 @@ class CGConfig(BaseModel):
mip_rd_dna: MipConfig | None = Field(None, alias="mip-rd-dna")
mip_rd_rna: MipConfig | None = Field(None, alias="mip-rd-rna")
mutant: MutantConfig | None = None
+ nallo: NalloConfig | None = None
raredisease: RarediseaseConfig | None = None
rnafusion: RnafusionConfig | None = None
statina: StatinaConfig | None = None
@@ -747,6 +769,7 @@ def delivery_service_factory(self) -> DeliveryServiceFactory:
LOG.debug("Instantiating delivery service factory")
factory = DeliveryServiceFactory(
store=self.status_db,
+ lims_api=self.lims_api,
hk_api=self.housekeeper_api,
tb_service=self.trailblazer_api,
rsync_service=self.delivery_rsync_service,
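
A sketch of the matching "nallo" section expected in the cg config file; the keys mirror the NalloConfig fields above (the slurm subkeys are the two read by NalloAnalysisAPI) and every value is a placeholder:

    nallo_section = {
        "binary_path": "/path/to/nextflow",
        "compute_env": "nf_tower_compute_env",
        "conda_binary": "/path/to/conda",
        "conda_env": "S_nallo",
        "platform": "/path/to/platform.config",
        "params": "/path/to/params.yaml",
        "config": "/path/to/nallo.config",
        "resources": "/path/to/resources.config",
        "launch_directory": "/path/to/launchdir",
        "workflow_bin_path": "/path/to/workflow/bin",
        "profile": "production",
        "revision": "dev",
        "root": "/path/to/analysis/root",
        "slurm": {"account": "slurm_account", "mail_user": "user@example.com"},
        "tower_workflow": "nallo",
    }
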
diff --git a/cg/services/deliver_files/file_formatter/utils/__init__.py b/cg/models/nallo/__init__.py
similarity index 100%
rename from cg/services/deliver_files/file_formatter/utils/__init__.py
rename to cg/models/nallo/__init__.py
diff --git a/cg/models/nallo/nallo.py b/cg/models/nallo/nallo.py
new file mode 100644
index 0000000000..c5463d5fe1
--- /dev/null
+++ b/cg/models/nallo/nallo.py
@@ -0,0 +1,63 @@
+from enum import StrEnum
+from pathlib import Path
+
+from pydantic import BaseModel, field_validator
+
+from cg.exc import NfSampleSheetError
+from cg.models.nf_analysis import WorkflowParameters
+
+
+class NalloSampleSheetEntry(BaseModel):
+ """Nallo sample model is used when building the sample sheet."""
+
+ project: str
+ sample: str
+ read_file: Path
+ family_id: str
+ paternal_id: str
+ maternal_id: str
+ sex: int
+ phenotype: int
+
+ @property
+ def reformat_sample_content(self) -> list[list[str]]:
+ """Reformat sample sheet content as a list of lists, where each list represents a line in the final file."""
+ return [
+ [
+ self.project,
+ self.sample,
+ self.read_file,
+ self.family_id,
+ self.paternal_id,
+ self.maternal_id,
+ self.sex,
+ self.phenotype,
+ ]
+ ]
+
+ @field_validator("read_file")
+ @classmethod
+ def read_file_exists(cls, bam_path: Path) -> Path:
+ """Verify that bam files exist."""
+ if not bam_path.is_file():
+ raise NfSampleSheetError(f"Bam file does not exist: {str(bam_path)}")
+ return bam_path
+
+
+class NalloSampleSheetHeaders(StrEnum):
+ project: str = "project"
+ sample: str = "sample"
+ file: str = "file"
+ family_id: str = "family_id"
+ paternal_id: str = "paternal_id"
+ maternal_id: str = "maternal_id"
+ sex: str = "sex"
+ phenotype: str = "phenotype"
+
+ @classmethod
+ def list(cls) -> list[str]:
+        return [header.value for header in cls]
+
+
+class NalloParameters(WorkflowParameters):
+ """Model for Nallo parameters."""
diff --git a/cg/models/orders/order.py b/cg/models/orders/order.py
deleted file mode 100644
index 5674e72c37..0000000000
--- a/cg/models/orders/order.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from typing import Any
-
-from pydantic.v1 import BaseModel, conlist, constr
-
-from cg.models.orders.constants import OrderType
-from cg.models.orders.samples import sample_class_for
-from cg.store.models import Customer, Sample
-
-
-class OrderIn(BaseModel):
- name: constr(min_length=2, max_length=Sample.order.property.columns[0].type.length)
- comment: str | None
- customer: constr(min_length=1, max_length=Customer.internal_id.property.columns[0].type.length)
- samples: conlist(Any, min_items=1)
- skip_reception_control: bool | None = None
- ticket: str | None
- order_type: OrderType | None = None
-
- @classmethod
- def parse_obj(cls, obj: dict, project: OrderType) -> "OrderIn":
- parsed_obj: OrderIn = super().parse_obj(obj)
- parsed_obj.parse_samples(project=project)
- parsed_obj.order_type = project
- return parsed_obj
-
- def parse_samples(self, project: OrderType) -> None:
- """
- Parses samples of by the type given by the project
-
- Parameters:
- project (OrderType): type of project
-
- Returns:
- Nothing
- """
- parsed_samples = []
-
- sample: dict
- for sample in self.samples:
- parsed_sample = sample_class_for(project=project).parse_obj(sample)
- parsed_sample.skip_reception_control = self.skip_reception_control
- parsed_samples.append(parsed_sample)
- self.samples = parsed_samples
diff --git a/cg/models/orders/samples.py b/cg/models/orders/samples.py
deleted file mode 100644
index 66c4d620d8..0000000000
--- a/cg/models/orders/samples.py
+++ /dev/null
@@ -1,350 +0,0 @@
-from pydantic.v1 import BaseModel, constr, validator
-
-from cg.constants import DataDelivery
-from cg.constants.constants import GenomeVersion, Workflow
-from cg.constants.orderforms import ORIGINAL_LAB_ADDRESSES, REGION_CODES
-from cg.models.orders.constants import OrderType
-from cg.models.orders.sample_base import (
- NAME_PATTERN,
- ContainerEnum,
- ControlEnum,
- PriorityEnum,
- SexEnum,
- StatusEnum,
-)
-from cg.store.models import Application, Case, Organism, Panel, Pool, Sample
-
-
-class OptionalIntValidator:
- @classmethod
- def str_to_int(cls, v: str) -> int | None:
- return int(v) if v else None
-
-
-class OptionalFloatValidator:
- @classmethod
- def str_to_float(cls, v: str) -> float | None:
- return float(v) if v else None
-
-
-class OrderInSample(BaseModel):
- # Order portal specific
- internal_id: constr(max_length=Sample.internal_id.property.columns[0].type.length) | None
- _suitable_project: OrderType = None
- application: constr(max_length=Application.tag.property.columns[0].type.length)
- comment: constr(max_length=Sample.comment.property.columns[0].type.length) | None
- skip_reception_control: bool | None = None
- data_analysis: Workflow
- data_delivery: DataDelivery
- name: constr(
- regex=NAME_PATTERN,
- min_length=2,
- max_length=Sample.name.property.columns[0].type.length,
- )
- priority: PriorityEnum = PriorityEnum.standard
- require_qc_ok: bool = False
- volume: str
- concentration_ng_ul: str | None
-
- @classmethod
- def is_sample_for(cls, project: OrderType):
- return project == cls._suitable_project
-
-
-class Of1508Sample(OrderInSample):
- # Orderform 1508
- # Order portal specific
- internal_id: constr(max_length=Sample.internal_id.property.columns[0].type.length) | None
- # "required for new samples"
- name: (
- constr(
- regex=NAME_PATTERN,
- min_length=2,
- max_length=Sample.name.property.columns[0].type.length,
- )
- | None
- )
-
- # customer
- age_at_sampling: float | None
- family_name: constr(
- regex=NAME_PATTERN,
- min_length=2,
- max_length=Case.name.property.columns[0].type.length,
- )
- case_internal_id: constr(max_length=Sample.internal_id.property.columns[0].type.length) | None
- sex: SexEnum = SexEnum.unknown
- tumour: bool = False
- source: str | None
- control: ControlEnum | None
- volume: str | None
- container: ContainerEnum | None
- # "required if plate for new samples"
- container_name: str | None
- well_position: str | None
- # "Required if samples are part of trio/family"
- mother: (
- constr(regex=NAME_PATTERN, max_length=Sample.name.property.columns[0].type.length) | None
- )
- father: (
- constr(regex=NAME_PATTERN, max_length=Sample.name.property.columns[0].type.length) | None
- )
- # This information is required for panel analysis
- capture_kit: str | None
- # This information is required for panel- or exome analysis
- elution_buffer: str | None
- tumour_purity: int | None
- # "This information is optional for FFPE-samples for new samples"
- formalin_fixation_time: int | None
- post_formalin_fixation_time: int | None
- tissue_block_size: str | None
- # "Not Required"
- cohorts: list[str] | None
- phenotype_groups: list[str] | None
- phenotype_terms: list[str] | None
- require_qc_ok: bool = False
- quantity: int | None
- subject_id: (
- constr(regex=NAME_PATTERN, max_length=Sample.subject_id.property.columns[0].type.length)
- | None
- )
- synopsis: str | None
-
- @validator("container", "container_name", "name", "source", "subject_id", "volume")
- def required_for_new_samples(cls, value, values, **kwargs):
- if not value and not values.get("internal_id"):
- raise ValueError(f"required for new sample {values.get('name')}")
- return value
-
- @validator(
- "tumour_purity",
- "formalin_fixation_time",
- "post_formalin_fixation_time",
- "quantity",
- pre=True,
- )
- def str_to_int(cls, v: str) -> int | None:
- return OptionalIntValidator.str_to_int(v=v)
-
- @validator(
- "age_at_sampling",
- "volume",
- pre=True,
- )
- def str_to_float(cls, v: str) -> float | None:
- return OptionalFloatValidator.str_to_float(v=v)
-
-
-class MipDnaSample(Of1508Sample):
- _suitable_project = OrderType.MIP_DNA
- # "Required if data analysis in Scout or vcf delivery"
- panels: list[constr(min_length=1, max_length=Panel.abbrev.property.columns[0].type.length)]
- status: StatusEnum
-
-
-class BalsamicSample(Of1508Sample):
- _suitable_project = OrderType.BALSAMIC
-
-
-class BalsamicQCSample(Of1508Sample):
- _suitable_project = OrderType.BALSAMIC_QC
- reference_genome: GenomeVersion | None
-
-
-class BalsamicUmiSample(Of1508Sample):
- _suitable_project = OrderType.BALSAMIC_UMI
-
-
-class MipRnaSample(Of1508Sample):
- _suitable_project = OrderType.MIP_RNA
-
-
-class RnafusionSample(Of1508Sample):
- _suitable_project = OrderType.RNAFUSION
-
-
-class TomteSample(MipDnaSample):
- _suitable_project = OrderType.TOMTE
- reference_genome: GenomeVersion | None
-
-
-class FastqSample(OrderInSample):
- _suitable_project = OrderType.FASTQ
-
- # Orderform 1508
- # "required"
- container: ContainerEnum | None
- sex: SexEnum = SexEnum.unknown
- source: str
- tumour: bool
- # "required if plate"
- container_name: str | None
- well_position: str | None
- elution_buffer: str
- # This information is required for panel analysis
- capture_kit: str | None
- # "Not Required"
- quantity: int | None
- subject_id: str | None
-
- @validator("quantity", pre=True)
- def str_to_int(cls, v: str) -> int | None:
- return OptionalIntValidator.str_to_int(v=v)
-
-
-class PacBioSample(OrderInSample):
- _suitable_project = OrderType.PACBIO_LONG_READ
-
- container: ContainerEnum
- container_name: str | None = None
- sex: SexEnum = SexEnum.unknown
- source: str
- subject_id: str
- tumour: bool
- well_position: str | None = None
-
-
-class RmlSample(OrderInSample):
- _suitable_project = OrderType.RML
-
- # 1604 Orderform Ready made libraries (RML)
- # Order portal specific
- # "This information is required"
- pool: constr(max_length=Pool.name.property.columns[0].type.length)
- concentration: float
- concentration_sample: float | None
- index: str
- index_number: str | None
- # "Required if Plate"
- rml_plate_name: str | None
- well_position_rml: str | None
- # "Automatically generated (if not custom) or custom"
- index_sequence: str | None
- # "Not required"
- control: str | None
-
- @validator("concentration_sample", pre=True)
- def str_to_float(cls, v: str) -> float | None:
- return OptionalFloatValidator.str_to_float(v=v)
-
-
-class FluffySample(RmlSample):
- _suitable_project = OrderType.FLUFFY
- # 1604 Orderform Ready made libraries (RML)
-
-
-class MetagenomeSample(Of1508Sample):
- _suitable_project = OrderType.METAGENOME
- # "This information is required"
- source: str
- # "This information is not required"
- concentration_sample: float | None
- family_name: None = None
- subject_id: None = None
-
- @validator("concentration_sample", pre=True)
- def str_to_float(cls, v: str) -> float | None:
- return OptionalFloatValidator.str_to_float(v=v)
-
- @validator("subject_id", pre=True)
- def required_for_new_samples(cls, v: str) -> None:
- """Overrides the parent validator since subject_id is optional for these samples."""
- return None
-
-
-class TaxprofilerSample(MetagenomeSample):
- _suitable_project = OrderType.TAXPROFILER
-
-
-class MicrobialSample(OrderInSample):
- # 1603 Orderform Microbial WHOLE_GENOME_SEQUENCING
- # "These fields are required"
- organism: constr(max_length=Organism.internal_id.property.columns[0].type.length)
- reference_genome: constr(max_length=Sample.reference_genome.property.columns[0].type.length)
- elution_buffer: str
- extraction_method: str | None
- container: ContainerEnum
- # "Required if Plate"
- container_name: str | None
- well_position: str | None
- # "Required if "Other" is chosen in column "Species""
- organism_other: constr(max_length=Organism.internal_id.property.columns[0].type.length) | None
- verified_organism: bool | None # sent to LIMS
- control: str | None
-
-
-class MicrobialFastqSample(OrderInSample):
- _suitable_project = OrderType.MICROBIAL_FASTQ
-
- elution_buffer: str
- container: ContainerEnum
- # "Required if Plate"
- container_name: str | None
- well_position: str | None
- # "These fields are not required"
- control: str | None
-
-
-class MicrosaltSample(MicrobialSample):
- _suitable_project = OrderType.MICROSALT
- # 1603 Orderform Microbial WHOLE_GENOME_SEQUENCING
-
-
-class SarsCov2Sample(MicrobialSample):
- _suitable_project = OrderType.SARS_COV_2
-
- # 2184 Orderform SARS-COV-2
- # "These fields are required"
- collection_date: str
- lab_code: str = None
- primer: str
- original_lab: str
- original_lab_address: str = None
- pre_processing_method: str
- region: str
- region_code: str = None
- selection_criteria: str
- volume: str | None
-
- @validator("lab_code", pre=True, always=True)
- def set_lab_code(cls, value):
- return "SE100 Karolinska"
-
- @validator("region_code", pre=True, always=True)
- def set_region_code(cls, value, values):
- return value if value else REGION_CODES[values["region"]]
-
- @validator("original_lab_address", pre=True, always=True)
- def set_original_lab_address(cls, value, values):
- return value if value else ORIGINAL_LAB_ADDRESSES[values["original_lab"]]
-
-
-def sample_class_for(project: OrderType):
- """Get the sample class for the specified project
-
- Args:
- project (OrderType): Project to get sample subclass for
- Returns:
- Subclass of OrderInSample
- """
-
- def all_subclasses(cls):
- """Get all subclasses recursively for a class
-
- Args:
- cls (Class): Class to get all subclasses for
- Returns:
- Set of Subclasses of cls
- """
- if cls.__subclasses__():
- return set(cls.__subclasses__()).union(
- [s for c in cls.__subclasses__() for s in all_subclasses(c)]
- )
-
- return []
-
- for sub_cls in all_subclasses(OrderInSample):
- if sub_cls.is_sample_for(project):
- return sub_cls
-
- raise ValueError
diff --git a/cg/resources/raredisease_bundle_filenames.yaml b/cg/resources/raredisease_bundle_filenames.yaml
index f09adeea2e..21ca8587c4 100644
--- a/cg/resources/raredisease_bundle_filenames.yaml
+++ b/cg/resources/raredisease_bundle_filenames.yaml
@@ -200,7 +200,7 @@
id: CASEID
path: PATHTOCASE/repeat_expansions/CASEID_repeat_expansion_stranger.vcf.gz
path_index: ~
- step: expansionhunter_stanger
+ step: expansionhunter_stranger
tag: case_sv_str
- format: vcf
id: CASEID
@@ -647,302 +647,302 @@
step: chromograph_rhoviz
tag: autozyg
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_1.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_rhocall_filter_cadd_vep_upd_regions_1.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_2.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_2.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_3.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_3.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_4.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_4.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_5.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_5.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_6.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_6.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_7.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_7.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_8.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_8.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_9.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_9.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_10.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_10.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_11.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_11.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_12.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_12.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_13.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_13.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_14.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_14.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_15.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_15.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_16.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_16.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_17.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_17.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_18.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_18.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_19.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_19.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_20.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_20.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_21.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_21.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_22.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_22.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_X.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_X.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_Y.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_Y.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_regions_chromograph/CASEID_rhocall_filter_cadd_vep_upd_regions_M.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_regions/SAMPLEID_upd_regions_M.png
path_index: ~
step: chromograph_upd
tag: regions
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_1.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_1.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_2.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_2.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_3.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_3.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_4.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_4.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_5.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_5.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_6.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_6.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_7.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_7.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_8.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_8.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_9.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_9.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_10.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_10.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_11.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_11.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_12.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_12.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_13.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_13.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_14.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_14.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_15.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_15.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_16.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_16.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_17.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_17.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_18.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_18.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_19.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_19.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_20.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_20.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_21.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_21.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_22.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_22.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_X.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_X.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_Y.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_Y.png
path_index: ~
step: chromograph_upd
tag: sites
- format: png
- id: CASEID
- path: PATHTOCASE/annotate_snv/genome/CASEID_rhocall_filter_cadd_vep_upd_sites_chromograph/CASEID_rhocall_filter_cadd_vep_upd_sites_M.png
+ id: SAMPLEID
+ path: PATHTOCASE/annotate_snv/genome/SAMPLEID_chromograph_sites/SAMPLEID_upd_sites_M.png
path_index: ~
step: chromograph_upd
tag: sites
diff --git a/cg/server/admin.py b/cg/server/admin.py
index 6fa37ae6f7..6e981f7a41 100644
--- a/cg/server/admin.py
+++ b/cg/server/admin.py
@@ -631,6 +631,7 @@ class SampleView(BaseView):
"_phenotype_terms",
"links",
"mother_links",
+ "sequencing_metrics",
]
@staticmethod
diff --git a/cg/server/endpoints/cases.py b/cg/server/endpoints/cases.py
index 4d03f47c43..c81dda2e1d 100644
--- a/cg/server/endpoints/cases.py
+++ b/cg/server/endpoints/cases.py
@@ -1,7 +1,9 @@
import logging
from http import HTTPStatus
+
from flask import Blueprint, abort, g, jsonify, request
-from cg.exc import CaseNotFoundError, OrderMismatchError
+
+from cg.exc import CaseNotFoundError, CgDataError, OrderMismatchError
from cg.server.dto.delivery_message.delivery_message_request import DeliveryMessageRequest
from cg.server.dto.delivery_message.delivery_message_response import DeliveryMessageResponse
from cg.server.endpoints.utils import before_request
@@ -62,7 +64,7 @@ def get_cases_delivery_message():
delivery_message_request
)
return jsonify(response.model_dump()), HTTPStatus.OK
- except (CaseNotFoundError, OrderMismatchError) as error:
+ except (CaseNotFoundError, OrderMismatchError, CgDataError) as error:
return jsonify({"error": str(error)}), HTTPStatus.BAD_REQUEST
@@ -74,7 +76,7 @@ def get_case_delivery_message(case_id: str):
delivery_message_request
)
return jsonify(response.model_dump()), HTTPStatus.OK
- except CaseNotFoundError as error:
+ except (CaseNotFoundError, CgDataError) as error:
return jsonify({"error": str(error)}), HTTPStatus.BAD_REQUEST
diff --git a/cg/server/endpoints/orders.py b/cg/server/endpoints/orders.py
index 3d75a3072e..59afcaad76 100644
--- a/cg/server/endpoints/orders.py
+++ b/cg/server/endpoints/orders.py
@@ -17,19 +17,15 @@
from cg.constants.constants import FileFormat
from cg.exc import (
OrderError,
- OrderExistsError,
OrderFormError,
OrderNotDeliverableError,
OrderNotFoundError,
TicketCreationError,
)
from cg.io.controller import WriteStream
-from cg.meta.orders import OrdersAPI
-from cg.models.orders.order import OrderIn, OrderType
+from cg.models.orders.constants import OrderType
from cg.models.orders.orderform_schema import Orderform
-from cg.server.dto.delivery_message.delivery_message_response import (
- DeliveryMessageResponse,
-)
+from cg.server.dto.delivery_message.delivery_message_response import DeliveryMessageResponse
from cg.server.dto.orders.order_delivery_update_request import OrderOpenUpdateRequest
from cg.server.dto.orders.order_patch_request import OrderOpenPatch
from cg.server.dto.orders.orders_request import OrdersRequest
@@ -38,11 +34,12 @@
from cg.server.ext import (
db,
delivery_message_service,
- lims,
order_service,
- order_submitter_registry,
+ order_validation_service,
+ storing_service_registry,
ticket_handler,
)
+from cg.services.orders.submitter.service import OrderSubmitter
from cg.store.models import Application, Customer
ORDERS_BLUEPRINT = Blueprint("orders", __name__, url_prefix="/api/v1")
@@ -151,13 +148,12 @@ def create_order_from_form():
@ORDERS_BLUEPRINT.route("/submit_order/<order_type>", methods=["POST"])
-def submit_order(order_type):
+def submit_order(order_type: OrderType):
"""Submit an order for samples."""
- api = OrdersAPI(
- lims=lims,
- status=db,
+ submitter = OrderSubmitter(
ticket_handler=ticket_handler,
- submitter_registry=order_submitter_registry,
+ storing_registry=storing_service_registry,
+ validation_service=order_validation_service,
)
error_message: str
try:
@@ -168,22 +164,15 @@ def submit_order(order_type):
content=request_json, file_format=FileFormat.JSON
),
)
- project = OrderType(order_type)
- order_in = OrderIn.parse_obj(request_json, project=project)
- existing_ticket: str | None = ticket_handler.parse_ticket_number(order_in.name)
- if existing_ticket and order_service.store.get_order_by_ticket_id(existing_ticket):
- raise OrderExistsError(f"Order with ticket id {existing_ticket} already exists.")
- result: dict = api.submit(
- project=project,
- order_in=order_in,
- user_name=g.current_user.name,
- user_mail=g.current_user.email,
+ result: dict = submitter.submit(
+ raw_order=request_json,
+ order_type=order_type,
+ user=g.current_user,
)
except ( # user misbehaviour
OrderError,
- OrderExistsError,
OrderFormError,
ValidationError,
ValueError,
@@ -259,3 +248,12 @@ def get_options():
panels=[panel.abbrev for panel in db.get_panels()],
sources=source_groups,
)
+
+
+@ORDERS_BLUEPRINT.route("/validate_order/<order_type>", methods=["POST"])
+def validate_order(order_type: OrderType):
+ raw_order = request.get_json()
+ response = order_validation_service.get_validation_response(
+ raw_order=raw_order, order_type=order_type, user_id=g.current_user.id
+ )
+ return jsonify(response), HTTPStatus.OK
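
A hedged usage sketch for the new validation endpoint; the host, the auth scheme and the payload shape are assumptions, not confirmed by this diff:

import requests

BASE_URL = "https://cg.example.com/api/v1"  # hypothetical host

raw_order = {"name": "order-2024-01", "samples": []}  # shape depends on order type
response = requests.post(
    f"{BASE_URL}/validate_order/mip-dna",
    json=raw_order,
    headers={"Authorization": "Bearer TOKEN"},  # placeholder credentials
    timeout=10,
)
response.raise_for_status()
# The body carries field-level validation feedback, so an order portal can
# flag problems before the order is actually submitted.
print(response.json())
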
diff --git a/cg/server/endpoints/samples.py b/cg/server/endpoints/samples.py
index 007a345782..4ee0d8d5f8 100644
--- a/cg/server/endpoints/samples.py
+++ b/cg/server/endpoints/samples.py
@@ -2,9 +2,7 @@
from flask import Blueprint, abort, g, jsonify, request
-from cg.server.dto.samples.collaborator_samples_request import (
- CollaboratorSamplesRequest,
-)
+from cg.server.dto.samples.collaborator_samples_request import CollaboratorSamplesRequest
from cg.server.dto.samples.samples_response import SamplesResponse
from cg.server.endpoints.utils import before_request
from cg.server.ext import db, sample_service
diff --git a/cg/server/ext.py b/cg/server/ext.py
index a69f9ec573..a1f0d97cec 100644
--- a/cg/server/ext.py
+++ b/cg/server/ext.py
@@ -8,16 +8,17 @@
from cg.apps.lims import LimsAPI
from cg.apps.tb.api import TrailblazerAPI
from cg.clients.freshdesk.freshdesk_client import FreshdeskClient
-from cg.meta.orders.ticket_handler import TicketHandler
from cg.server.app_config import app_config
from cg.services.application.service import ApplicationsWebService
from cg.services.delivery_message.delivery_message_service import DeliveryMessageService
from cg.services.orders.order_service.order_service import OrderService
from cg.services.orders.order_summary_service.order_summary_service import OrderSummaryService
-from cg.services.orders.submitters.order_submitter_registry import (
- OrderSubmitterRegistry,
- setup_order_submitter_registry,
+from cg.services.orders.storing.service_registry import (
+ StoringServiceRegistry,
+ setup_storing_service_registry,
)
+from cg.services.orders.submitter.ticket_handler import TicketHandler
+from cg.services.orders.validation.service import OrderValidationService
from cg.services.sample_run_metrics_service.sample_run_metrics_service import (
SampleRunMetricsService,
)
@@ -92,10 +93,12 @@ def init_app(self, app):
order_service = OrderService(store=db, status_service=summary_service)
sample_service = SampleService(db)
flow_cell_service = SampleRunMetricsService(db)
-order_submitter_registry: OrderSubmitterRegistry = setup_order_submitter_registry(
+storing_service_registry: StoringServiceRegistry = setup_storing_service_registry(
lims=lims,
status_db=db,
)
+
+order_validation_service = OrderValidationService(store=db)
freshdesk_client = FreshdeskClient(
base_url=app_config.freshdesk_url, api_key=app_config.freshdesk_api_key
)
diff --git a/cg/services/deliver_files/constants.py b/cg/services/deliver_files/constants.py
new file mode 100644
index 0000000000..b126b7cf09
--- /dev/null
+++ b/cg/services/deliver_files/constants.py
@@ -0,0 +1,23 @@
+from enum import Enum
+
+
+class DeliveryDestination(Enum):
+ """Enum for the DeliveryDestination
+ BASE: Deliver to the base folder provided in the call
+ CUSTOMER: Deliver to the customer folder on hasta
+ FOHM: Deliver to the FOHM folder on hasta
+ """
+
+ BASE = "base"
+ CUSTOMER = "customer"
+ FOHM = "fohm"
+
+
+class DeliveryStructure(Enum):
+ """Enum for the DeliveryStructure
+ FLAT: Deliver the files in a flat structure, i.e. all files in the same folder
+ NESTED: Deliver the files in a nested structure, i.e. files in folders for each sample/case
+ """
+
+ FLAT: str = "flat"
+ NESTED: str = "nested"
diff --git a/cg/services/deliver_files/deliver_files_service/deliver_files_service.py b/cg/services/deliver_files/deliver_files_service/deliver_files_service.py
index 6b45433885..46cfe38a65 100644
--- a/cg/services/deliver_files/deliver_files_service/deliver_files_service.py
+++ b/cg/services/deliver_files/deliver_files_service/deliver_files_service.py
@@ -4,17 +4,19 @@
from cg.apps.tb import TrailblazerAPI
from cg.apps.tb.models import TrailblazerAnalysis
from cg.constants import Priority, Workflow
-from cg.constants.tb import AnalysisTypes
+from cg.constants.tb import AnalysisType
+from cg.meta.workflow.utils.utils import MAP_TO_TRAILBLAZER_PRIORITY
from cg.services.analysis_service.analysis_service import AnalysisService
from cg.services.deliver_files.deliver_files_service.error_handling import (
handle_no_delivery_files_error,
)
from cg.services.deliver_files.file_fetcher.abstract import FetchDeliveryFilesService
from cg.services.deliver_files.file_fetcher.models import DeliveryFiles
-from cg.services.deliver_files.file_filter.abstract import FilterDeliveryFilesService
-from cg.services.deliver_files.file_formatter.abstract import DeliveryFileFormattingService
-from cg.services.deliver_files.file_formatter.models import FormattedFiles
-from cg.services.deliver_files.file_mover.service import DeliveryFilesMover
+from cg.services.deliver_files.file_formatter.destination.abstract import (
+ DeliveryDestinationFormatter,
+)
+from cg.services.deliver_files.file_formatter.destination.models import FormattedFiles
+from cg.services.deliver_files.file_mover.abstract import DestinationFilesMover
from cg.services.deliver_files.rsync.service import DeliveryRsyncService
from cg.store.exc import EntryNotFoundError
from cg.store.models import Case
@@ -25,27 +27,28 @@
class DeliverFilesService:
"""
- Deliver files to the customer inbox on the HPC and Rsync them to the inbox folder on the delivery server.
- 1. Get the files to deliver from Housekeeper based on workflow and data delivery
- 2. Create a delivery folder structure in the customer folder on Hasta and move the files there
- 3. Reformatting of output / renaming of files
- 4. Rsync the files to the customer inbox on the delivery server
- 5. Add the rsync job to Trailblazer
+ Deliver files for a case, cases in a ticket or a sample to a specified destination or upload location.
+ Requires:
+    - FetchDeliveryFilesService: Service to fetch the files to deliver from Housekeeper
+ - DestinationFilesMover: Service to move the files to the destination of delivery or upload
+ - DeliveryDestinationFormatter: Service to format the files to the destination format
+ - DeliveryRsyncService: Service to run rsync for the delivery
+ - TrailblazerAPI: Service to interact with Trailblazer
+ - AnalysisService: Service to interact with the analysis
+ - Store: Store to interact with the database
"""
def __init__(
self,
delivery_file_manager_service: FetchDeliveryFilesService,
- file_filter: FilterDeliveryFilesService,
- move_file_service: DeliveryFilesMover,
- file_formatter_service: DeliveryFileFormattingService,
+ move_file_service: DestinationFilesMover,
+ file_formatter_service: DeliveryDestinationFormatter,
rsync_service: DeliveryRsyncService,
tb_service: TrailblazerAPI,
analysis_service: AnalysisService,
status_db: Store,
):
self.file_manager = delivery_file_manager_service
- self.file_filter = file_filter
self.file_mover = move_file_service
self.file_formatter = file_formatter_service
self.status_db = status_db
@@ -57,14 +60,22 @@ def __init__(
def deliver_files_for_case(
self, case: Case, delivery_base_path: Path, dry_run: bool = False
) -> None:
- """Deliver the files for a case to the customer folder."""
+ """Deliver the files for a case to the customer folder.
+ args:
+ case: The case to deliver files for
+ delivery_base_path: The base path to deliver the files to
+ dry_run: Whether to perform a dry run or not
+ """
delivery_files: DeliveryFiles = self.file_manager.get_files_to_deliver(
case_id=case.internal_id
)
moved_files: DeliveryFiles = self.file_mover.move_files(
delivery_files=delivery_files, delivery_base_path=delivery_base_path
)
- formatted_files: FormattedFiles = self.file_formatter.format_files(moved_files)
+ formatted_files: FormattedFiles = self.file_formatter.format_files(
+ delivery_files=moved_files
+ )
+
folders_to_deliver: set[Path] = set(
[formatted_file.formatted_path.parent for formatted_file in formatted_files.files]
)
@@ -76,7 +87,12 @@ def deliver_files_for_case(
def deliver_files_for_ticket(
self, ticket_id: str, delivery_base_path: Path, dry_run: bool = False
) -> None:
- """Deliver the files for all cases in a ticket to the customer folder."""
+ """Deliver the files for all cases in a ticket to the customer folder.
+ args:
+ ticket_id: The ticket id to deliver files for
+ delivery_base_path: The base path to deliver the files to
+ dry_run: Whether to perform a dry run or not
+ """
cases: list[Case] = self.status_db.get_cases_by_ticket_id(ticket_id)
if not cases:
raise EntryNotFoundError(f"No cases found for ticket {ticket_id}")
@@ -90,15 +106,14 @@ def deliver_files_for_sample(
):
"""Deliver the files for a sample to the customer folder."""
delivery_files: DeliveryFiles = self.file_manager.get_files_to_deliver(
- case_id=case.internal_id
- )
- filtered_files: DeliveryFiles = self.file_filter.filter_delivery_files(
- delivery_files=delivery_files, sample_id=sample_id
+ case_id=case.internal_id, sample_id=sample_id
)
moved_files: DeliveryFiles = self.file_mover.move_files(
- delivery_files=filtered_files, delivery_base_path=delivery_base_path
+ delivery_files=delivery_files, delivery_base_path=delivery_base_path
+ )
+ formatted_files: FormattedFiles = self.file_formatter.format_files(
+ delivery_files=moved_files
)
- formatted_files: FormattedFiles = self.file_formatter.format_files(moved_files)
folders_to_deliver: set[Path] = set(
[formatted_file.formatted_path.parent for formatted_file in formatted_files.files]
)
@@ -107,7 +122,31 @@ def deliver_files_for_sample(
)
self._add_trailblazer_tracking(case=case, job_id=job_id, dry_run=dry_run)
+ def deliver_files_for_sample_no_rsync(
+ self, case: Case, sample_id: str, delivery_base_path: Path
+ ):
+ """
+ Deliver the files for a sample to the delivery base path. Does not perform rsync.
+ args:
+ case: The case to deliver files for
+ sample_id: The sample to deliver files for
+ delivery_base_path: The base path to deliver the files to
+ """
+ delivery_files: DeliveryFiles = self.file_manager.get_files_to_deliver(
+ case_id=case.internal_id, sample_id=sample_id
+ )
+ moved_files: DeliveryFiles = self.file_mover.move_files(
+ delivery_files=delivery_files, delivery_base_path=delivery_base_path
+ )
+ self.file_formatter.format_files(delivery_files=moved_files)
+
def _start_rsync_job(self, case: Case, dry_run: bool, folders_to_deliver: set[Path]) -> int:
+ """Start a rsync job for the case.
+ args:
+ case: The case to start the rsync job for
+ dry_run: Whether to perform a dry run or not
+ folders_to_deliver: The folders to deliver
+ """
LOG.debug(f"[RSYNC] Starting rsync job for case {case.internal_id}")
job_id: int = self.rsync_service.run_rsync_for_case(
case=case,
@@ -122,17 +161,23 @@ def _start_rsync_job(self, case: Case, dry_run: bool, folders_to_deliver: set[Pa
return job_id
def _add_trailblazer_tracking(self, case: Case, job_id: int, dry_run: bool) -> None:
+ """Add the rsync job to Trailblazer for tracking.
+ args:
+ case: The case to add the job for
+ job_id: The job id to add for trailblazer tracking
+ dry_run: Whether to perform a dry run or not
+ """
if dry_run:
LOG.info(f"Would have added the analysis for case {case.internal_id} to Trailblazer")
else:
LOG.debug(f"[TB SERVICE] Adding analysis for case {case.internal_id} to Trailblazer")
analysis: TrailblazerAnalysis = self.tb_service.add_pending_analysis(
case_id=f"{case.internal_id}_rsync",
- analysis_type=AnalysisTypes.OTHER,
+ analysis_type=AnalysisType.OTHER,
config_path=self.rsync_service.trailblazer_config_path.as_posix(),
order_id=case.latest_order.id,
out_dir=self.rsync_service.log_dir.as_posix(),
- slurm_quality_of_service=Priority.priority_to_slurm_qos().get(case.priority),
+ priority=MAP_TO_TRAILBLAZER_PRIORITY[case.priority],
workflow=Workflow.RSYNC,
ticket=case.latest_ticket,
)
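
A rough driving example for the refactored service, with every collaborator mocked out; the constructor keywords and method names come from the diff above, the ids are made up:

from pathlib import Path
from unittest.mock import MagicMock

from cg.services.deliver_files.deliver_files_service.deliver_files_service import (
    DeliverFilesService,
)

service = DeliverFilesService(
    delivery_file_manager_service=MagicMock(),  # FetchDeliveryFilesService
    move_file_service=MagicMock(),  # DestinationFilesMover
    file_formatter_service=MagicMock(),  # DeliveryDestinationFormatter
    rsync_service=MagicMock(),
    tb_service=MagicMock(),
    analysis_service=MagicMock(),
    status_db=MagicMock(),
)

case = MagicMock(internal_id="hypothetical_case")
# Fetch, move and format one sample's files without starting rsync, e.g. when
# the files are staged for an upload that is handled elsewhere.
service.deliver_files_for_sample_no_rsync(
    case=case, sample_id="hypothetical_sample", delivery_base_path=Path("/tmp/staging")
)
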
diff --git a/cg/services/deliver_files/deliver_files_service/deliver_files_service_factory.py b/cg/services/deliver_files/deliver_files_service/deliver_files_service_factory.py
deleted file mode 100644
index 45854fce29..0000000000
--- a/cg/services/deliver_files/deliver_files_service/deliver_files_service_factory.py
+++ /dev/null
@@ -1,175 +0,0 @@
-"""Module for the factory of the deliver files service."""
-
-from typing import Type
-
-from cg.apps.housekeeper.hk import HousekeeperAPI
-from cg.apps.tb import TrailblazerAPI
-from cg.constants import DataDelivery, Workflow
-from cg.constants.sequencing import SeqLibraryPrepCategory
-from cg.services.analysis_service.analysis_service import AnalysisService
-from cg.services.deliver_files.deliver_files_service.deliver_files_service import (
- DeliverFilesService,
-)
-from cg.services.deliver_files.deliver_files_service.exc import DeliveryTypeNotSupported
-from cg.services.deliver_files.file_fetcher.abstract import FetchDeliveryFilesService
-from cg.services.deliver_files.file_fetcher.analysis_raw_data_service import (
- RawDataAndAnalysisDeliveryFileFetcher,
-)
-from cg.services.deliver_files.file_fetcher.analysis_service import AnalysisDeliveryFileFetcher
-from cg.services.deliver_files.file_fetcher.raw_data_service import RawDataDeliveryFileFetcher
-from cg.services.deliver_files.file_filter.sample_service import SampleFileFilter
-from cg.services.deliver_files.file_formatter.abstract import DeliveryFileFormattingService
-from cg.services.deliver_files.file_formatter.service import DeliveryFileFormatter
-from cg.services.deliver_files.file_formatter.utils.case_service import CaseFileFormatter
-from cg.services.deliver_files.file_formatter.utils.sample_concatenation_service import (
- SampleFileConcatenationFormatter,
-)
-from cg.services.deliver_files.file_formatter.utils.sample_service import (
- SampleFileFormatter,
- FileManagingService,
- SampleFileNameFormatter,
-)
-from cg.services.deliver_files.file_mover.service import DeliveryFilesMover
-from cg.services.deliver_files.rsync.service import DeliveryRsyncService
-from cg.services.deliver_files.tag_fetcher.abstract import FetchDeliveryFileTagsService
-from cg.services.deliver_files.tag_fetcher.bam_service import BamDeliveryTagsFetcher
-from cg.services.deliver_files.tag_fetcher.sample_and_case_service import (
- SampleAndCaseDeliveryTagsFetcher,
-)
-from cg.services.fastq_concatenation_service.fastq_concatenation_service import (
- FastqConcatenationService,
-)
-from cg.store.models import Case
-from cg.store.store import Store
-
-
-class DeliveryServiceFactory:
- """Class to build the delivery services based on workflow and delivery type."""
-
- def __init__(
- self,
- store: Store,
- hk_api: HousekeeperAPI,
- rsync_service: DeliveryRsyncService,
- tb_service: TrailblazerAPI,
- analysis_service: AnalysisService,
- ):
- self.store = store
- self.hk_api = hk_api
- self.rsync_service = rsync_service
- self.tb_service = tb_service
- self.analysis_service = analysis_service
-
- @staticmethod
- def _sanitise_delivery_type(delivery_type: DataDelivery) -> DataDelivery:
- """Sanitise the delivery type."""
- if delivery_type in [DataDelivery.FASTQ_QC, DataDelivery.FASTQ_SCOUT]:
- return DataDelivery.FASTQ
- if delivery_type in [DataDelivery.ANALYSIS_SCOUT]:
- return DataDelivery.ANALYSIS_FILES
- if delivery_type in [
- DataDelivery.FASTQ_ANALYSIS_SCOUT,
- DataDelivery.FASTQ_QC_ANALYSIS,
- ]:
- return DataDelivery.FASTQ_ANALYSIS
- return delivery_type
-
- @staticmethod
- def _validate_delivery_type(delivery_type: DataDelivery):
- """Check if the delivery type is supported. Raises DeliveryTypeNotSupported error."""
- if delivery_type in [
- DataDelivery.FASTQ,
- DataDelivery.ANALYSIS_FILES,
- DataDelivery.FASTQ_ANALYSIS,
- DataDelivery.BAM,
- ]:
- return
- raise DeliveryTypeNotSupported(
- f"Delivery type {delivery_type} is not supported. Supported delivery types are"
- f" {DataDelivery.FASTQ}, {DataDelivery.ANALYSIS_FILES},"
- f" {DataDelivery.FASTQ_ANALYSIS}, {DataDelivery.BAM}."
- )
-
- @staticmethod
- def _get_file_tag_fetcher(delivery_type: DataDelivery) -> FetchDeliveryFileTagsService:
- """Get the file tag fetcher based on the delivery type."""
- service_map: dict[DataDelivery, Type[FetchDeliveryFileTagsService]] = {
- DataDelivery.FASTQ: SampleAndCaseDeliveryTagsFetcher,
- DataDelivery.ANALYSIS_FILES: SampleAndCaseDeliveryTagsFetcher,
- DataDelivery.FASTQ_ANALYSIS: SampleAndCaseDeliveryTagsFetcher,
- DataDelivery.BAM: BamDeliveryTagsFetcher,
- }
- return service_map[delivery_type]()
-
- def _get_file_fetcher(self, delivery_type: DataDelivery) -> FetchDeliveryFilesService:
- """Get the file fetcher based on the delivery type."""
- service_map: dict[DataDelivery, Type[FetchDeliveryFilesService]] = {
- DataDelivery.FASTQ: RawDataDeliveryFileFetcher,
- DataDelivery.ANALYSIS_FILES: AnalysisDeliveryFileFetcher,
- DataDelivery.FASTQ_ANALYSIS: RawDataAndAnalysisDeliveryFileFetcher,
- DataDelivery.BAM: RawDataDeliveryFileFetcher,
- }
- file_tag_fetcher: FetchDeliveryFileTagsService = self._get_file_tag_fetcher(delivery_type)
- return service_map[delivery_type](
- status_db=self.store,
- hk_api=self.hk_api,
- tags_fetcher=file_tag_fetcher,
- )
-
- def _convert_workflow(self, case: Case) -> Workflow:
- """Converts a workflow with the introduction of the microbial-fastq delivery type an
- unsupported combination of delivery type and workflow setup is required. This function
- makes sure that a raw data workflow with microbial fastq delivery type is treated as a
- microsalt workflow so that the microbial-fastq sample files can be concatenated."""
- tag: str = case.samples[0].application_version.application.tag
- microbial_tags: list[str] = [
- application.tag
- for application in self.store.get_active_applications_by_prep_category(
- prep_category=SeqLibraryPrepCategory.MICROBIAL
- )
- ]
- if case.data_analysis == Workflow.RAW_DATA and tag in microbial_tags:
- return Workflow.MICROSALT
- return case.data_analysis
-
- def _get_sample_file_formatter(
- self,
- case: Case,
- ) -> SampleFileFormatter | SampleFileConcatenationFormatter:
- """Get the file formatter service based on the workflow."""
- converted_workflow: Workflow = self._convert_workflow(case)
- if converted_workflow in [Workflow.MICROSALT]:
- return SampleFileConcatenationFormatter(
- file_manager=FileManagingService(),
- file_formatter=SampleFileNameFormatter(),
- concatenation_service=FastqConcatenationService(),
- )
- return SampleFileFormatter(
- file_manager=FileManagingService(), file_name_formatter=SampleFileNameFormatter()
- )
-
- def build_delivery_service(
- self, case: Case, delivery_type: DataDelivery | None = None
- ) -> DeliverFilesService:
- """Build a delivery service based on a case."""
- delivery_type: DataDelivery = self._sanitise_delivery_type(
- delivery_type if delivery_type else case.data_delivery
- )
- self._validate_delivery_type(delivery_type)
- file_fetcher: FetchDeliveryFilesService = self._get_file_fetcher(delivery_type)
- sample_file_formatter: SampleFileFormatter | SampleFileConcatenationFormatter = (
- self._get_sample_file_formatter(case)
- )
- file_formatter: DeliveryFileFormattingService = DeliveryFileFormatter(
- case_file_formatter=CaseFileFormatter(), sample_file_formatter=sample_file_formatter
- )
- return DeliverFilesService(
- delivery_file_manager_service=file_fetcher,
- move_file_service=DeliveryFilesMover(),
- file_filter=SampleFileFilter(),
- file_formatter_service=file_formatter,
- status_db=self.store,
- rsync_service=self.rsync_service,
- tb_service=self.tb_service,
- analysis_service=self.analysis_service,
- )
diff --git a/cg/services/deliver_files/factory.py b/cg/services/deliver_files/factory.py
new file mode 100644
index 0000000000..1701cba5fb
--- /dev/null
+++ b/cg/services/deliver_files/factory.py
@@ -0,0 +1,323 @@
+"""Module for the factory of the deliver files service."""
+
+from typing import Type
+
+from cg.apps.housekeeper.hk import HousekeeperAPI
+from cg.apps.lims import LimsAPI
+from cg.apps.tb import TrailblazerAPI
+from cg.constants import DataDelivery, Workflow
+from cg.constants.sequencing import SeqLibraryPrepCategory
+from cg.services.analysis_service.analysis_service import AnalysisService
+from cg.services.deliver_files.constants import DeliveryDestination, DeliveryStructure
+from cg.services.deliver_files.deliver_files_service.deliver_files_service import (
+ DeliverFilesService,
+)
+from cg.services.deliver_files.deliver_files_service.exc import DeliveryTypeNotSupported
+from cg.services.deliver_files.file_fetcher.abstract import FetchDeliveryFilesService
+from cg.services.deliver_files.file_fetcher.analysis_raw_data_service import (
+ RawDataAndAnalysisDeliveryFileFetcher,
+)
+from cg.services.deliver_files.file_fetcher.analysis_service import AnalysisDeliveryFileFetcher
+from cg.services.deliver_files.file_fetcher.raw_data_service import RawDataDeliveryFileFetcher
+from cg.services.deliver_files.file_formatter.destination.abstract import (
+ DeliveryDestinationFormatter,
+)
+from cg.services.deliver_files.file_formatter.destination.base_service import (
+ BaseDeliveryFormatter,
+)
+from cg.services.deliver_files.file_formatter.files.case_service import CaseFileFormatter
+from cg.services.deliver_files.file_formatter.files.mutant_service import (
+ MutantFileFormatter,
+)
+from cg.services.deliver_files.file_formatter.files.concatenation_service import (
+ SampleFileConcatenationFormatter,
+)
+from cg.services.deliver_files.file_formatter.files.sample_service import (
+ SampleFileFormatter,
+ FileManager,
+)
+from cg.services.deliver_files.file_formatter.path_name.abstract import PathNameFormatter
+from cg.services.deliver_files.file_formatter.path_name.flat_structure import (
+ FlatStructurePathFormatter,
+)
+from cg.services.deliver_files.file_formatter.path_name.nested_structure import (
+ NestedStructurePathFormatter,
+)
+from cg.services.deliver_files.file_mover.abstract import DestinationFilesMover
+from cg.services.deliver_files.file_mover.customer_inbox_service import (
+ CustomerInboxDestinationFilesMover,
+)
+from cg.services.deliver_files.file_mover.base_service import BaseDestinationFilesMover
+from cg.services.deliver_files.rsync.service import DeliveryRsyncService
+from cg.services.deliver_files.tag_fetcher.abstract import FetchDeliveryFileTagsService
+from cg.services.deliver_files.tag_fetcher.bam_service import BamDeliveryTagsFetcher
+from cg.services.deliver_files.tag_fetcher.fohm_upload_service import FOHMUploadTagsFetcher
+from cg.services.deliver_files.tag_fetcher.sample_and_case_service import (
+ SampleAndCaseDeliveryTagsFetcher,
+)
+from cg.services.deliver_files.utils import FileMover
+from cg.services.fastq_concatenation_service.fastq_concatenation_service import (
+ FastqConcatenationService,
+)
+from cg.store.models import Case
+from cg.store.store import Store
+
+
+class DeliveryServiceFactory:
+ """
+ Class to build the delivery services based on case, workflow, delivery type, delivery destination and delivery structure.
+ The delivery destination is used to specify delivery to the customer or for external upload.
+ Workflow is used to specify the workflow of the case and is required for the tag fetcher.
+ Delivery type is used to specify the type of delivery to perform.
+ Delivery structure is used to specify the structure of the delivery.
+ """
+
+ def __init__(
+ self,
+ store: Store,
+ lims_api: LimsAPI,
+ hk_api: HousekeeperAPI,
+ rsync_service: DeliveryRsyncService,
+ tb_service: TrailblazerAPI,
+ analysis_service: AnalysisService,
+ ):
+ self.store = store
+ self.lims_api = lims_api
+ self.hk_api = hk_api
+ self.rsync_service = rsync_service
+ self.tb_service = tb_service
+ self.analysis_service = analysis_service
+
+ @staticmethod
+ def _sanitise_delivery_type(delivery_type: DataDelivery) -> DataDelivery:
+ """Sanitise the delivery type.
+ We have multiple delivery types that are a combination of other delivery types or uploads.
+ Here we make sure to convert unsupported delivery types to supported ones.
+ args:
+ delivery_type: The type of delivery to perform.
+ """
+ if delivery_type in [DataDelivery.FASTQ_QC, DataDelivery.FASTQ_SCOUT]:
+ return DataDelivery.FASTQ
+ if delivery_type in [DataDelivery.ANALYSIS_SCOUT]:
+ return DataDelivery.ANALYSIS_FILES
+ if delivery_type in [
+ DataDelivery.FASTQ_ANALYSIS_SCOUT,
+ DataDelivery.FASTQ_QC_ANALYSIS,
+ ]:
+ return DataDelivery.FASTQ_ANALYSIS
+ return delivery_type
+
+ @staticmethod
+ def _validate_delivery_type(delivery_type: DataDelivery):
+ """
+ Check if the delivery type is supported. Raises DeliveryTypeNotSupported error.
+ args:
+ delivery_type: The type of delivery to perform.
+ """
+ if delivery_type in [
+ DataDelivery.FASTQ,
+ DataDelivery.ANALYSIS_FILES,
+ DataDelivery.FASTQ_ANALYSIS,
+ DataDelivery.BAM,
+ ]:
+ return
+ raise DeliveryTypeNotSupported(
+ f"Delivery type {delivery_type} is not supported. Supported delivery types are"
+ f" {DataDelivery.FASTQ}, {DataDelivery.ANALYSIS_FILES},"
+ f" {DataDelivery.FASTQ_ANALYSIS}, {DataDelivery.BAM}."
+ )
+
+ @staticmethod
+ def _get_file_tag_fetcher(
+ delivery_type: DataDelivery, delivery_destination: DeliveryDestination
+ ) -> FetchDeliveryFileTagsService:
+ """
+ Get the file tag fetcher based on the delivery type or delivery destination.
+        NOTE: FOHM delivery requires a special set of tags compared to customer delivery,
+        so it overrides the default tag fetcher implied by the case's delivery type.
+ args:
+ delivery_type: The type of delivery to perform.
+            delivery_destination: The destination of the delivery; defaults to customer.
+
+ """
+ if delivery_destination == DeliveryDestination.FOHM:
+ return FOHMUploadTagsFetcher()
+ service_map: dict[DataDelivery, Type[FetchDeliveryFileTagsService]] = {
+ DataDelivery.FASTQ: SampleAndCaseDeliveryTagsFetcher,
+ DataDelivery.ANALYSIS_FILES: SampleAndCaseDeliveryTagsFetcher,
+ DataDelivery.FASTQ_ANALYSIS: SampleAndCaseDeliveryTagsFetcher,
+ DataDelivery.BAM: BamDeliveryTagsFetcher,
+ }
+ return service_map[delivery_type]()
+
+ def _get_file_fetcher(
+ self, delivery_type: DataDelivery, delivery_destination: DeliveryDestination
+ ) -> FetchDeliveryFilesService:
+ """Get the file fetcher based on the delivery type.
+ args:
+ delivery_type: The type of delivery to perform.
+            delivery_destination: The destination of the delivery; defaults to customer. See DeliveryDestination enum for explanation.
+
+ """
+ service_map: dict[DataDelivery, Type[FetchDeliveryFilesService]] = {
+ DataDelivery.FASTQ: RawDataDeliveryFileFetcher,
+ DataDelivery.ANALYSIS_FILES: AnalysisDeliveryFileFetcher,
+ DataDelivery.FASTQ_ANALYSIS: RawDataAndAnalysisDeliveryFileFetcher,
+ DataDelivery.BAM: RawDataDeliveryFileFetcher,
+ }
+ file_tag_fetcher: FetchDeliveryFileTagsService = self._get_file_tag_fetcher(
+ delivery_type=delivery_type, delivery_destination=delivery_destination
+ )
+ return service_map[delivery_type](
+ status_db=self.store,
+ hk_api=self.hk_api,
+ tags_fetcher=file_tag_fetcher,
+ )
+
+ def _convert_workflow(self, case: Case) -> Workflow:
+ """Change the workflow of a Microbial Fastq case to Microsalt to allow the concatenation of fastq files.
+        The microbial-fastq delivery type introduced a combination of delivery type and workflow that is otherwise
+        unsupported. This function makes sure that a raw data workflow with microbial-fastq delivery
+ type is treated as a microsalt workflow so that the microbial-fastq sample files can be concatenated.
+ args:
+ case: The case to convert the workflow for
+ """
+ tag: str = case.samples[0].application_version.application.tag
+ microbial_tags: list[str] = [
+ application.tag
+ for application in self.store.get_active_applications_by_prep_category(
+ prep_category=SeqLibraryPrepCategory.MICROBIAL
+ )
+ ]
+ if case.data_analysis == Workflow.RAW_DATA and tag in microbial_tags:
+ return Workflow.MICROSALT
+ return case.data_analysis
+
+ def _get_sample_file_formatter(
+ self,
+ case: Case,
+ delivery_structure: DeliveryStructure = DeliveryStructure.NESTED,
+ ) -> SampleFileFormatter | SampleFileConcatenationFormatter | MutantFileFormatter:
+ """Get the file formatter service based on the workflow.
+ Depending on the delivery structure the path name formatter will be different.
+ Args:
+ case: The case to deliver files for.
+ delivery_structure: The structure of the delivery. See DeliveryStructure enum for explanation. Defaults to nested.
+ """
+
+ converted_workflow: Workflow = self._convert_workflow(case)
+ if converted_workflow in [Workflow.MICROSALT]:
+ return SampleFileConcatenationFormatter(
+ file_manager=FileManager(),
+ path_name_formatter=self._get_path_name_formatter(delivery_structure),
+ concatenation_service=FastqConcatenationService(),
+ )
+ if converted_workflow == Workflow.MUTANT:
+ return MutantFileFormatter(
+ lims_api=self.lims_api,
+ file_manager=FileManager(),
+ file_formatter=SampleFileConcatenationFormatter(
+ file_manager=FileManager(),
+ path_name_formatter=self._get_path_name_formatter(delivery_structure),
+ concatenation_service=FastqConcatenationService(),
+ ),
+ )
+ return SampleFileFormatter(
+ file_manager=FileManager(),
+ path_name_formatter=self._get_path_name_formatter(delivery_structure),
+ )
+
+ def _get_case_file_formatter(self, delivery_structure: DeliveryStructure) -> CaseFileFormatter:
+ """
+ Get the case file formatter based on the delivery structure.
+ args:
+ delivery_structure: The structure of the delivery. See DeliveryStructure enum for explanation.
+ """
+ return CaseFileFormatter(
+ file_manager=FileManager(),
+ path_name_formatter=self._get_path_name_formatter(delivery_structure),
+ )
+
+ @staticmethod
+ def _get_path_name_formatter(
+ delivery_structure: DeliveryStructure,
+ ) -> PathNameFormatter:
+ """
+        Get the path name formatter based on the delivery structure.
+ args:
+ delivery_structure: The structure of the delivery. See DeliveryStructure enum for explanation.
+ """
+ if delivery_structure == DeliveryStructure.FLAT:
+ return FlatStructurePathFormatter()
+ return NestedStructurePathFormatter()
+
+ @staticmethod
+ def _get_file_mover(
+ delivery_destination: DeliveryDestination,
+ ) -> CustomerInboxDestinationFilesMover | BaseDestinationFilesMover:
+ """Get the file mover based on the delivery type.
+ args:
+ delivery_destination: The destination of the delivery. See DeliveryDestination enum for explanation.
+ """
+ if delivery_destination in [DeliveryDestination.BASE, DeliveryDestination.FOHM]:
+ return BaseDestinationFilesMover(FileMover(FileManager()))
+ return CustomerInboxDestinationFilesMover(FileMover(FileManager()))
+
+ def _get_file_formatter(
+ self,
+ delivery_structure: DeliveryStructure,
+ case: Case,
+ ) -> DeliveryDestinationFormatter:
+ """
+ Get the file formatter service based on the delivery destination.
+ args:
+ delivery_structure: The structure of the delivery. See DeliveryStructure enum for explanation.
+ case: The case to deliver files for.
+ """
+ sample_file_formatter: (
+ SampleFileFormatter | SampleFileConcatenationFormatter | MutantFileFormatter
+ ) = self._get_sample_file_formatter(case=case, delivery_structure=delivery_structure)
+ case_file_formatter: CaseFileFormatter = self._get_case_file_formatter(
+ delivery_structure=delivery_structure
+ )
+ return BaseDeliveryFormatter(
+ case_file_formatter=case_file_formatter,
+ sample_file_formatter=sample_file_formatter,
+ )
+
+ def build_delivery_service(
+ self,
+ case: Case,
+ delivery_type: DataDelivery | None = None,
+ delivery_destination: DeliveryDestination = DeliveryDestination.CUSTOMER,
+ delivery_structure: DeliveryStructure = DeliveryStructure.NESTED,
+ ) -> DeliverFilesService:
+ """Build a delivery service based on a case.
+ args:
+ case: The case to deliver files for.
+ delivery_type: The type of data delivery to perform. See DataDelivery enum for explanation.
+            delivery_destination: The destination of the delivery; defaults to customer. See DeliveryDestination enum for explanation.
+            delivery_structure: The structure of the delivery; defaults to nested. See DeliveryStructure enum for explanation.
+ """
+ delivery_type: DataDelivery = self._sanitise_delivery_type(
+ delivery_type if delivery_type else case.data_delivery
+ )
+ self._validate_delivery_type(delivery_type)
+ file_fetcher: FetchDeliveryFilesService = self._get_file_fetcher(
+ delivery_type=delivery_type, delivery_destination=delivery_destination
+ )
+ file_move_service: DestinationFilesMover = self._get_file_mover(
+ delivery_destination=delivery_destination
+ )
+ file_formatter: DeliveryDestinationFormatter = self._get_file_formatter(
+ case=case, delivery_structure=delivery_structure
+ )
+ return DeliverFilesService(
+ delivery_file_manager_service=file_fetcher,
+ move_file_service=file_move_service,
+ file_formatter_service=file_formatter,
+ status_db=self.store,
+ rsync_service=self.rsync_service,
+ tb_service=self.tb_service,
+ analysis_service=self.analysis_service,
+ )
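
And the factory end to end; the backing APIs are mocked and the case is fabricated, but the keyword arguments match build_delivery_service above:

from unittest.mock import MagicMock

from cg.constants import DataDelivery
from cg.services.deliver_files.constants import DeliveryDestination, DeliveryStructure
from cg.services.deliver_files.factory import DeliveryServiceFactory

factory = DeliveryServiceFactory(
    store=MagicMock(),
    lims_api=MagicMock(),
    hk_api=MagicMock(),
    rsync_service=MagicMock(),
    tb_service=MagicMock(),
    analysis_service=MagicMock(),
)

# A FOHM upload: the destination swaps in the FOHM tag fetcher and the base
# file mover, while the flat structure keeps every file in one folder.
delivery_service = factory.build_delivery_service(
    case=MagicMock(),
    delivery_type=DataDelivery.FASTQ,
    delivery_destination=DeliveryDestination.FOHM,
    delivery_structure=DeliveryStructure.FLAT,
)
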
diff --git a/cg/services/deliver_files/file_fetcher/abstract.py b/cg/services/deliver_files/file_fetcher/abstract.py
index 95c5d78a98..65fe83e6d5 100644
--- a/cg/services/deliver_files/file_fetcher/abstract.py
+++ b/cg/services/deliver_files/file_fetcher/abstract.py
@@ -29,6 +29,6 @@ def __init__(
self.tags_fetcher = tags_fetcher
@abstractmethod
- def get_files_to_deliver(self, case_id: str) -> DeliveryFiles:
+ def get_files_to_deliver(self, case_id: str, sample_id: str | None) -> DeliveryFiles:
"""Get the files to deliver."""
pass
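
To illustrate the widened contract, a toy fetcher honouring the optional sample_id; the DeliveryFiles and DeliveryMetaData field names are assumed from their uses elsewhere in this diff:

from cg.services.deliver_files.file_fetcher.abstract import FetchDeliveryFilesService
from cg.services.deliver_files.file_fetcher.models import DeliveryFiles, DeliveryMetaData


class EmptyFileFetcher(FetchDeliveryFilesService):
    """Toy fetcher returning an empty delivery, scoped to a sample if one is given."""

    def get_files_to_deliver(self, case_id: str, sample_id: str | None = None) -> DeliveryFiles:
        # A real fetcher queries Housekeeper here and, when sample_id is set,
        # restricts the result to that sample's files.
        delivery_data = DeliveryMetaData(
            case_id=case_id, customer_internal_id="cust000", ticket_id="123456"
        )
        return DeliveryFiles(delivery_data=delivery_data, case_files=[], sample_files=[])
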
diff --git a/cg/services/deliver_files/file_fetcher/analysis_raw_data_service.py b/cg/services/deliver_files/file_fetcher/analysis_raw_data_service.py
index 4eafa0210d..daceca0fb4 100644
--- a/cg/services/deliver_files/file_fetcher/analysis_raw_data_service.py
+++ b/cg/services/deliver_files/file_fetcher/analysis_raw_data_service.py
@@ -28,13 +28,19 @@ def __init__(
self.hk_api = hk_api
self.tags_fetcher = tags_fetcher
- def get_files_to_deliver(self, case_id: str) -> DeliveryFiles:
+ def get_files_to_deliver(self, case_id: str, sample_id: str | None = None) -> DeliveryFiles:
+ """
+ Get files to deliver for a case or sample for both analysis and raw data.
+ args:
+ case_id: The case id to deliver files for
+ sample_id: The sample id to deliver files for
+ """
case: Case = self.status_db.get_case_by_internal_id(internal_id=case_id)
fastq_files: DeliveryFiles = self._fetch_files(
- service_class=RawDataDeliveryFileFetcher, case_id=case_id
+ service_class=RawDataDeliveryFileFetcher, case_id=case_id, sample_id=sample_id
)
analysis_files: DeliveryFiles = self._fetch_files(
- service_class=AnalysisDeliveryFileFetcher, case_id=case_id
+ service_class=AnalysisDeliveryFileFetcher, case_id=case_id, sample_id=sample_id
)
delivery_data = DeliveryMetaData(
case_id=case.internal_id,
@@ -48,7 +54,15 @@ def get_files_to_deliver(self, case_id: str) -> DeliveryFiles:
sample_files=analysis_files.sample_files + fastq_files.sample_files,
)
- def _fetch_files(self, service_class: type, case_id: str) -> DeliveryFiles:
- """Fetch files using the provided service class."""
+ def _fetch_files(
+ self, service_class: type, case_id: str, sample_id: str | None
+ ) -> DeliveryFiles:
+ """Fetch files using the provided service class.
+        Wrapper around either the RawDataDeliveryFileFetcher or the AnalysisDeliveryFileFetcher.
+ args:
+ service_class: The service class to use to fetch the files
+ case_id: The case id to fetch files for
+ sample_id: The sample id to fetch files for
+ """
service = service_class(self.status_db, self.hk_api, tags_fetcher=self.tags_fetcher)
- return service.get_files_to_deliver(case_id)
+ return service.get_files_to_deliver(case_id=case_id, sample_id=sample_id)
diff --git a/cg/services/deliver_files/file_fetcher/analysis_service.py b/cg/services/deliver_files/file_fetcher/analysis_service.py
index ed0072cb2b..43e0b2f920 100644
--- a/cg/services/deliver_files/file_fetcher/analysis_service.py
+++ b/cg/services/deliver_files/file_fetcher/analysis_service.py
@@ -38,12 +38,23 @@ def __init__(
self.hk_api = hk_api
self.tags_fetcher = tags_fetcher
- def get_files_to_deliver(self, case_id: str) -> DeliveryFiles:
- """Return a list of analysis files to be delivered for a case."""
- LOG.debug(f"[FETCH SERVICE] Fetching analysis files for case: {case_id}")
+ def get_files_to_deliver(self, case_id: str, sample_id: str | None = None) -> DeliveryFiles:
+ """Return a list of analysis files to be delivered for a case.
+ args:
+ case_id: The case id to deliver files for
+ sample_id: The sample id to deliver files for
+ """
+ LOG.debug(
+ f"[FETCH SERVICE] Fetching analysis files for case: {case_id}, sample: {sample_id}"
+ )
case: Case = self.status_db.get_case_by_internal_id(internal_id=case_id)
- analysis_case_files: list[CaseFile] = self._get_analysis_case_delivery_files(case)
- analysis_sample_files: list[SampleFile] = self._get_analysis_sample_delivery_files(case)
+ analysis_case_files: list[CaseFile] = self._get_analysis_case_delivery_files(
+ case=case, sample_id=sample_id
+ )
+
+ analysis_sample_files: list[SampleFile] = self._get_analysis_sample_delivery_files(
+ case=case, sample_id=sample_id
+ )
delivery_data = DeliveryMetaData(
case_id=case.internal_id,
customer_internal_id=case.customer.internal_id,
@@ -60,7 +71,13 @@ def get_files_to_deliver(self, case_id: str) -> DeliveryFiles:
@staticmethod
def _validate_delivery_has_content(delivery_files: DeliveryFiles) -> DeliveryFiles:
- """Check if the delivery files has files to deliver."""
+ """
+        Check if the delivery files have any files to deliver.
+ raises:
+ NoDeliveryFilesError if no files to deliver.
+ args:
+ delivery_files: The delivery files to check
+ """
if delivery_files.case_files or delivery_files.sample_files:
return delivery_files
LOG.info(
@@ -71,9 +88,17 @@ def _validate_delivery_has_content(delivery_files: DeliveryFiles) -> DeliveryFil
@handle_missing_bundle_errors
def _get_sample_files_from_case_bundle(
self, workflow: Workflow, sample_id: str, case_id: str
- ) -> list[SampleFile]:
- """Return a list of files from a case bundle with a sample id as tag."""
+ ) -> list[SampleFile] | None:
+ """Return a list of files from a case bundle with a sample id as tag.
+        This is to fetch sample-specific analysis files that are stored on the case level.
+ args:
+ workflow: The workflow to fetch files for
+ sample_id: The sample id to fetch files for
+ case_id: The case id to fetch files for
+ """
sample_tags: list[set[str]] = self.tags_fetcher.fetch_tags(workflow).sample_tags
+ if not sample_tags:
+ return []
sample_tags_with_sample_id: list[set[str]] = [tag | {sample_id} for tag in sample_tags]
sample_files: list[File] = self.hk_api.get_files_from_latest_version_containing_tags(
bundle_name=case_id, tags=sample_tags_with_sample_id
@@ -89,9 +114,16 @@ def _get_sample_files_from_case_bundle(
for sample_file in sample_files
]
- def _get_analysis_sample_delivery_files(self, case: Case) -> list[SampleFile] | None:
- """Return a all sample files to deliver for a case."""
- sample_ids: list[str] = case.sample_ids
+ def _get_analysis_sample_delivery_files(
+ self, case: Case, sample_id: str | None
+ ) -> list[SampleFile]:
+ """Return all sample files to deliver for a case.
+        If a sample id is given, only that sample's files are returned.
+ args:
+ case: The case to deliver files for
+ sample_id: The sample id to deliver files for
+ """
+ sample_ids: list[str] = [sample_id] if sample_id else case.sample_ids
delivery_files: list[SampleFile] = []
for sample_id in sample_ids:
sample_files: list[SampleFile] = self._get_sample_files_from_case_bundle(
@@ -101,13 +133,20 @@ def _get_analysis_sample_delivery_files(self, case: Case) -> list[SampleFile] |
return delivery_files
@handle_missing_bundle_errors
- def _get_analysis_case_delivery_files(self, case: Case) -> list[CaseFile]:
+ def _get_analysis_case_delivery_files(
+ self, case: Case, sample_id: str | None
+ ) -> list[CaseFile]:
"""
Return a complete list of analysis case files to be delivered and ignore analysis sample
- files.
+        files. This is to ensure that only case-level analysis files are delivered.
+ args:
+ case: The case to deliver files for
+ sample_id: The sample id to deliver files for
"""
case_tags: list[set[str]] = self.tags_fetcher.fetch_tags(case.data_analysis).case_tags
- sample_id_tags: list[str] = case.sample_ids
+ if not case_tags:
+ return []
+ sample_id_tags: list[str] = [sample_id] if sample_id else case.sample_ids
case_files: list[File] = self.hk_api.get_files_from_latest_version_containing_tags(
bundle_name=case.internal_id, tags=case_tags, excluded_tags=sample_id_tags
)
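For reference, the sample-level fetch above unions each workflow tag set with the sample id before querying Housekeeper, so only files carrying both the file-type tag and that specific sample id are returned. A minimal sketch with illustrative tag values and sample id:

```python
# Illustrative values; real tags come from the workflow's tags_fetcher.
sample_id = "ACC0001A1"
sample_tags: list[set[str]] = [{"cram"}, {"vcf-snv-clinical"}]

# Each tag set is unioned with the sample id, so Housekeeper returns only
# files tagged with both the file-type tag and this specific sample.
sample_tags_with_sample_id: list[set[str]] = [tag | {sample_id} for tag in sample_tags]

assert sample_tags_with_sample_id == [
    {"cram", "ACC0001A1"},
    {"vcf-snv-clinical", "ACC0001A1"},
]
```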
diff --git a/cg/services/deliver_files/file_fetcher/models.py b/cg/services/deliver_files/file_fetcher/models.py
index ef38780862..f22a491d43 100644
--- a/cg/services/deliver_files/file_fetcher/models.py
+++ b/cg/services/deliver_files/file_fetcher/models.py
@@ -7,7 +7,7 @@ class DeliveryMetaData(BaseModel):
case_id: str
customer_internal_id: str
ticket_id: str
- customer_ticket_inbox: Path | None = None
+ delivery_path: Path | None = None
class CaseFile(BaseModel):
diff --git a/cg/services/deliver_files/file_fetcher/raw_data_service.py b/cg/services/deliver_files/file_fetcher/raw_data_service.py
index bdc99cf1ca..a38dedd8d0 100644
--- a/cg/services/deliver_files/file_fetcher/raw_data_service.py
+++ b/cg/services/deliver_files/file_fetcher/raw_data_service.py
@@ -43,11 +43,16 @@ def __init__(
self.hk_api = hk_api
self.tags_fetcher = tags_fetcher
- def get_files_to_deliver(self, case_id: str) -> DeliveryFiles:
- """Return a list of raw data files to be delivered for a case and its samples."""
+ def get_files_to_deliver(self, case_id: str, sample_id: str | None = None) -> DeliveryFiles:
+ """
+ Return a list of raw data files to be delivered for a case and its samples.
+ args:
+ case_id: The case id to deliver files for
+ sample_id: The sample id to deliver files for
+ """
LOG.debug(f"[FETCH SERVICE] Fetching raw data files for case: {case_id}")
case: Case = self.status_db.get_case_by_internal_id(internal_id=case_id)
- sample_ids: list[str] = case.sample_ids
+ sample_ids: list[str] = [sample_id] if sample_id else case.sample_ids
raw_data_files: list[SampleFile] = []
for sample_id in sample_ids:
raw_data_files.extend(
@@ -68,7 +73,12 @@ def get_files_to_deliver(self, case_id: str) -> DeliveryFiles:
@staticmethod
def _validate_delivery_has_content(delivery_files: DeliveryFiles) -> DeliveryFiles:
- """Check if the delivery files has files to deliver."""
+ """Check if the delivery files has files to deliver.
+ raises:
+ NoDeliveryFilesError if no files to deliver.
+ args:
+ delivery_files: The delivery files to check
+ """
for sample_file in delivery_files.sample_files:
LOG.debug(
f"Found file to deliver: {sample_file.file_path} for sample: {sample_file.sample_id}"
@@ -82,7 +92,12 @@ def _validate_delivery_has_content(delivery_files: DeliveryFiles) -> DeliveryFil
@handle_missing_bundle_errors
def _get_raw_data_files_for_sample(self, case_id: str, sample_id: str) -> list[SampleFile]:
- """Get the RawData files for a sample."""
+ """
+ Get the raw data files for a sample. The tags are hardcoded to the raw data workflow's sample tags.
+ args:
+ case_id: The case id to get the raw data files for
+ sample_id: The sample id to get the raw data files for
+ """
file_tags: list[set[str]] = self.tags_fetcher.fetch_tags(Workflow.RAW_DATA).sample_tags
raw_data_files: list[File] = self.hk_api.get_files_from_latest_version_containing_tags(
bundle_name=sample_id, tags=file_tags
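The same optional `sample_id` narrowing appears in both the analysis and raw data fetchers. A minimal standalone sketch of the pattern, where `Case` is an illustrative stand-in rather than the Store model:

```python
from dataclasses import dataclass, field


@dataclass
class Case:  # stand-in for cg.store.models.Case
    sample_ids: list[str] = field(default_factory=list)


def resolve_sample_ids(case: Case, sample_id: str | None) -> list[str]:
    """Return only the requested sample, or every sample in the case."""
    return [sample_id] if sample_id else case.sample_ids


case = Case(sample_ids=["ACC0001A1", "ACC0002A1"])
assert resolve_sample_ids(case, sample_id=None) == ["ACC0001A1", "ACC0002A1"]
assert resolve_sample_ids(case, sample_id="ACC0002A1") == ["ACC0002A1"]
```

This fetch-time narrowing is what makes the `SampleFileFilter` deleted below redundant.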
diff --git a/cg/services/deliver_files/file_filter/abstract.py b/cg/services/deliver_files/file_filter/abstract.py
deleted file mode 100644
index a0d846b544..0000000000
--- a/cg/services/deliver_files/file_filter/abstract.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from abc import abstractmethod, ABC
-
-from cg.services.deliver_files.file_fetcher.models import DeliveryFiles
-
-
-class FilterDeliveryFilesService(ABC):
-
- @abstractmethod
- def filter_delivery_files(self, delivery_files: DeliveryFiles, sample_id: str) -> DeliveryFiles:
- pass
diff --git a/cg/services/deliver_files/file_filter/sample_service.py b/cg/services/deliver_files/file_filter/sample_service.py
deleted file mode 100644
index 3f4ed9e56c..0000000000
--- a/cg/services/deliver_files/file_filter/sample_service.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from cg.services.deliver_files.file_fetcher.models import DeliveryFiles
-from cg.services.deliver_files.file_filter.abstract import FilterDeliveryFilesService
-
-
-class SampleFileFilter(FilterDeliveryFilesService):
-
- def filter_delivery_files(self, delivery_files: DeliveryFiles, sample_id: str) -> DeliveryFiles:
- delivery_files.sample_files = [
- sample_file
- for sample_file in delivery_files.sample_files
- if sample_file.sample_id == sample_id
- ]
- return delivery_files
diff --git a/cg/services/orders/submitters/__init__.py b/cg/services/deliver_files/file_formatter/destination/__init__.py
similarity index 100%
rename from cg/services/orders/submitters/__init__.py
rename to cg/services/deliver_files/file_formatter/destination/__init__.py
diff --git a/cg/services/deliver_files/file_formatter/abstract.py b/cg/services/deliver_files/file_formatter/destination/abstract.py
similarity index 70%
rename from cg/services/deliver_files/file_formatter/abstract.py
rename to cg/services/deliver_files/file_formatter/destination/abstract.py
index 31eb12f582..559f553e55 100644
--- a/cg/services/deliver_files/file_formatter/abstract.py
+++ b/cg/services/deliver_files/file_formatter/destination/abstract.py
@@ -1,10 +1,11 @@
from abc import abstractmethod, ABC
+from pathlib import Path
from cg.services.deliver_files.file_fetcher.models import DeliveryFiles
-from cg.services.deliver_files.file_formatter.models import FormattedFiles
+from cg.services.deliver_files.file_formatter.destination.models import FormattedFiles
-class DeliveryFileFormattingService(ABC):
+class DeliveryDestinationFormatter(ABC):
"""
Abstract class that encapsulates the logic required for formatting files to deliver.
"""
diff --git a/cg/services/deliver_files/file_formatter/service.py b/cg/services/deliver_files/file_formatter/destination/base_service.py
similarity index 56%
rename from cg/services/deliver_files/file_formatter/service.py
rename to cg/services/deliver_files/file_formatter/destination/base_service.py
index 2265db4f2e..5b5b3493e8 100644
--- a/cg/services/deliver_files/file_formatter/service.py
+++ b/cg/services/deliver_files/file_formatter/destination/base_service.py
@@ -1,31 +1,42 @@
import logging
-import os
from pathlib import Path
from cg.services.deliver_files.file_fetcher.models import CaseFile, DeliveryFiles, SampleFile
-from cg.services.deliver_files.file_formatter.abstract import DeliveryFileFormattingService
-from cg.services.deliver_files.file_formatter.models import FormattedFile, FormattedFiles
-from cg.services.deliver_files.file_formatter.utils.case_service import CaseFileFormatter
-from cg.services.deliver_files.file_formatter.utils.sample_concatenation_service import (
+from cg.services.deliver_files.file_formatter.destination.abstract import (
+ DeliveryDestinationFormatter,
+)
+from cg.services.deliver_files.file_formatter.destination.models import (
+ FormattedFile,
+ FormattedFiles,
+)
+from cg.services.deliver_files.file_formatter.files.case_service import CaseFileFormatter
+from cg.services.deliver_files.file_formatter.files.mutant_service import (
+ MutantFileFormatter,
+)
+from cg.services.deliver_files.file_formatter.files.concatenation_service import (
SampleFileConcatenationFormatter,
)
-from cg.services.deliver_files.file_formatter.utils.sample_service import SampleFileFormatter
+from cg.services.deliver_files.file_formatter.files.sample_service import (
+ SampleFileFormatter,
+)
LOG = logging.getLogger(__name__)
-class DeliveryFileFormatter(DeliveryFileFormattingService):
+class BaseDeliveryFormatter(DeliveryDestinationFormatter):
"""
Format the files to be delivered in the generic format.
- Expected structure:
- <customer>/inbox/<ticket_id>/<case_name>/<case_files>
- <customer>/inbox/<ticket_id>/<sample_name>/<sample_files>
+ args:
+ case_file_formatter: The case file formatter
+ sample_file_formatter: The sample file formatter. This can be a SampleFileFormatter, SampleFileConcatenationFormatter or MutantFileFormatter.
"""
def __init__(
self,
case_file_formatter: CaseFileFormatter,
- sample_file_formatter: SampleFileFormatter | SampleFileConcatenationFormatter,
+ sample_file_formatter: (
+ SampleFileFormatter | SampleFileConcatenationFormatter | MutantFileFormatter
+ ),
):
self.case_file_formatter = case_file_formatter
self.sample_file_formatter = sample_file_formatter
@@ -33,32 +44,26 @@ def __init__(
def format_files(self, delivery_files: DeliveryFiles) -> FormattedFiles:
"""Format the files to be delivered and return the formatted files in the generic format."""
LOG.debug("[FORMAT SERVICE] Formatting files for delivery")
- ticket_dir_path: Path = delivery_files.delivery_data.customer_ticket_inbox
- self._create_ticket_dir(ticket_dir_path)
formatted_files: list[FormattedFile] = self._format_sample_and_case_files(
sample_files=delivery_files.sample_files,
case_files=delivery_files.case_files,
- ticket_dir_path=ticket_dir_path,
+ delivery_path=delivery_files.delivery_data.delivery_path,
)
return FormattedFiles(files=formatted_files)
def _format_sample_and_case_files(
- self, sample_files: list[SampleFile], case_files: list[CaseFile], ticket_dir_path: Path
+ self, sample_files: list[SampleFile], case_files: list[CaseFile], delivery_path: Path
) -> list[FormattedFile]:
"""Helper method to format both sample and case files."""
+ LOG.debug(f"[FORMAT SERVICE] delivery_path: {delivery_path}")
formatted_files: list[FormattedFile] = self.sample_file_formatter.format_files(
moved_files=sample_files,
- ticket_dir_path=ticket_dir_path,
+ delivery_path=delivery_path,
)
if case_files:
formatted_case_files: list[FormattedFile] = self.case_file_formatter.format_files(
moved_files=case_files,
- ticket_dir_path=ticket_dir_path,
+ delivery_path=delivery_path,
)
formatted_files.extend(formatted_case_files)
return formatted_files
-
- @staticmethod
- def _create_ticket_dir(ticket_dir_path: Path) -> None:
- """Create the ticket directory if it does not exist."""
- os.makedirs(ticket_dir_path, exist_ok=True)
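A hypothetical wiring of the new formatter stack, following the constructor signatures introduced in this diff and assuming `FileManager` from `cg.services.deliver_files.utils` takes no constructor arguments:

```python
from cg.services.deliver_files.file_formatter.destination.base_service import (
    BaseDeliveryFormatter,
)
from cg.services.deliver_files.file_formatter.files.case_service import CaseFileFormatter
from cg.services.deliver_files.file_formatter.files.sample_service import SampleFileFormatter
from cg.services.deliver_files.file_formatter.path_name.nested_structure import (
    NestedStructurePathFormatter,
)
from cg.services.deliver_files.utils import FileManager

# Nested structure: files end up under <delivery_path>/<case_or_sample_name>/.
formatter = BaseDeliveryFormatter(
    case_file_formatter=CaseFileFormatter(
        path_name_formatter=NestedStructurePathFormatter(),
        file_manager=FileManager(),
    ),
    sample_file_formatter=SampleFileFormatter(
        file_manager=FileManager(),
        path_name_formatter=NestedStructurePathFormatter(),
    ),
)
```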
diff --git a/cg/services/deliver_files/file_formatter/models.py b/cg/services/deliver_files/file_formatter/destination/models.py
similarity index 100%
rename from cg/services/deliver_files/file_formatter/models.py
rename to cg/services/deliver_files/file_formatter/destination/models.py
diff --git a/cg/services/deliver_files/file_formatter/files/__init__.py b/cg/services/deliver_files/file_formatter/files/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/deliver_files/file_formatter/files/abstract.py b/cg/services/deliver_files/file_formatter/files/abstract.py
new file mode 100644
index 0000000000..bb1f241b3d
--- /dev/null
+++ b/cg/services/deliver_files/file_formatter/files/abstract.py
@@ -0,0 +1,15 @@
+from abc import abstractmethod, ABC
+from pathlib import Path
+
+from cg.services.deliver_files.file_fetcher.models import SampleFile, CaseFile
+from cg.services.deliver_files.file_formatter.destination.models import FormattedFile
+
+
+class FileFormatter(ABC):
+
+ @abstractmethod
+ def format_files(
+ self, moved_files: list[CaseFile | SampleFile], delivery_path: Path
+ ) -> list[FormattedFile]:
+ """Format the files to deliver."""
+ pass
diff --git a/cg/services/deliver_files/file_formatter/files/case_service.py b/cg/services/deliver_files/file_formatter/files/case_service.py
new file mode 100644
index 0000000000..deb1a1e4b0
--- /dev/null
+++ b/cg/services/deliver_files/file_formatter/files/case_service.py
@@ -0,0 +1,83 @@
+import logging
+from pathlib import Path
+
+from cg.services.deliver_files.file_fetcher.models import CaseFile
+from cg.services.deliver_files.file_formatter.files.abstract import FileFormatter
+from cg.services.deliver_files.file_formatter.destination.models import FormattedFile
+from cg.services.deliver_files.file_formatter.path_name.abstract import PathNameFormatter
+from cg.services.deliver_files.file_formatter.path_name.nested_structure import (
+ NestedStructurePathFormatter,
+)
+from cg.services.deliver_files.utils import FileManager
+
+LOG = logging.getLogger(__name__)
+
+
+class CaseFileFormatter(FileFormatter):
+ """
+ Format the case files to deliver and return the formatted files.
+ args:
+ path_name_formatter: The path name formatter to format paths to either a flat or nested structure in the delivery destination
+ file_manager: The file manager
+ """
+
+ def __init__(
+ self,
+ path_name_formatter: PathNameFormatter,
+ file_manager: FileManager,
+ ):
+ self.path_name_formatter = path_name_formatter
+ self.file_manager = file_manager
+
+ def format_files(self, moved_files: list[CaseFile], delivery_path: Path) -> list[FormattedFile]:
+ """Format the case files to deliver and return the formatted files.
+ args:
+ moved_files: The case files to format
+ delivery_path: The path to deliver the files to
+ """
+ LOG.debug("[FORMAT SERVICE] Formatting case files")
+ self._create_case_name_folder(
+ delivery_path=delivery_path, case_name=moved_files[0].case_name
+ )
+ return self._format_case_files(moved_files)
+
+ def _format_case_files(self, case_files: list[CaseFile]) -> list[FormattedFile]:
+ """Format the case files to deliver and return the formatted files.
+ args:
+ case_files: The case files to format
+ """
+ formatted_files: list[FormattedFile] = self._get_formatted_paths(case_files)
+ for formatted_file in formatted_files:
+ self.file_manager.rename_file(
+ src=formatted_file.original_path, dst=formatted_file.formatted_path
+ )
+ return formatted_files
+
+ def _create_case_name_folder(self, delivery_path: Path, case_name: str) -> None:
+ """
+ Create a folder for the case in the delivery path.
+ The folder is only created if the provided path name formatter is a NestedStructurePathFormatter.
+ args:
+ delivery_path: The path to deliver the files to
+ case_name: The name of the case
+ """
+ LOG.debug(f"[FORMAT SERVICE] Creating folder for case: {case_name}")
+ if isinstance(self.path_name_formatter, NestedStructurePathFormatter):
+ self.file_manager.create_directories(base_path=delivery_path, directories={case_name})
+
+ def _get_formatted_paths(self, case_files: list[CaseFile]) -> list[FormattedFile]:
+ """Return a list of formatted case files.
+ args:
+ case_files: The case files to format
+ """
+ formatted_files: list[FormattedFile] = []
+ for case_file in case_files:
+ formatted_path = self.path_name_formatter.format_file_path(
+ file_path=case_file.file_path,
+ provided_id=case_file.case_id,
+ provided_name=case_file.case_name,
+ )
+ formatted_files.append(
+ FormattedFile(original_path=case_file.file_path, formatted_path=formatted_path)
+ )
+ return formatted_files
diff --git a/cg/services/deliver_files/file_formatter/files/concatenation_service.py b/cg/services/deliver_files/file_formatter/files/concatenation_service.py
new file mode 100644
index 0000000000..4ae1c1a442
--- /dev/null
+++ b/cg/services/deliver_files/file_formatter/files/concatenation_service.py
@@ -0,0 +1,320 @@
+import logging
+from pathlib import Path
+import re
+
+from cg.constants.constants import ReadDirection, FileFormat, FileExtensions
+from cg.services.deliver_files.file_formatter.files.abstract import FileFormatter
+from cg.services.deliver_files.file_formatter.files.models import FastqFile
+from cg.services.deliver_files.file_formatter.path_name.abstract import PathNameFormatter
+
+from cg.services.fastq_concatenation_service.fastq_concatenation_service import (
+ FastqConcatenationService,
+)
+from cg.services.fastq_concatenation_service.utils import generate_concatenated_fastq_delivery_path
+from cg.services.deliver_files.file_fetcher.models import SampleFile
+from cg.services.deliver_files.file_formatter.destination.models import FormattedFile
+from cg.services.deliver_files.file_formatter.files.sample_service import (
+ FileManager,
+)
+from cg.services.deliver_files.file_formatter.path_name.nested_structure import (
+ NestedStructurePathFormatter,
+)
+from cg.utils.files import get_all_files_in_directory_tree
+
+LOG = logging.getLogger(__name__)
+
+
+class SampleFileConcatenationFormatter(FileFormatter):
+ """
+ Format the sample files to deliver, concatenate fastq files and return the formatted files.
+ Used for workflows: Microsalt.
+ args:
+ file_manager: The file manager
+ path_name_formatter: The path name formatter to format paths to either a flat or nested structure in the delivery destination
+ concatenation_service: The fastq concatenation service to concatenate fastq files.
+ """
+
+ def __init__(
+ self,
+ file_manager: FileManager,
+ path_name_formatter: PathNameFormatter,
+ concatenation_service: FastqConcatenationService,
+ ):
+ self.file_manager = file_manager
+ self.path_name_formatter = path_name_formatter
+ self.concatenation_service = concatenation_service
+
+ def format_files(
+ self, moved_files: list[SampleFile], delivery_path: Path
+ ) -> list[FormattedFile]:
+ """
+ Format the sample files to deliver, concatenate fastq files and return the formatted files.
+ args:
+ moved_files: list[SampleFile]: List of sample files to deliver.
+ These are files that have been moved from housekeeper to the delivery path.
+ delivery_path: Path: Path to the delivery directory.
+ """
+ LOG.debug("[FORMAT SERVICE] Formatting and concatenating sample files")
+ sample_names: set[str] = self._get_sample_names(sample_files=moved_files)
+ self._create_sample_directories(delivery_path=delivery_path, sample_names=sample_names)
+ formatted_files: list[FormattedFile] = self._format_sample_file_paths(moved_files)
+ LOG.debug(
+ f"[FORMAT SERVICE] number of formatted files: {len(formatted_files)}, number of moved files: {len(moved_files)}"
+ )
+ self._rename_original_files(formatted_files)
+ LOG.debug(f"[FORMAT SERVICE] delivery_path: {delivery_path}")
+ concatenation_map: dict[Path, Path] = self._concatenate_fastq_files(
+ delivery_path=delivery_path,
+ sample_names=sample_names,
+ )
+ self._replace_fastq_paths(
+ concatenation_maps=concatenation_map,
+ formatted_files=formatted_files,
+ )
+ return formatted_files
+
+ @staticmethod
+ def _get_sample_names(sample_files: list[SampleFile]) -> set[str]:
+ """Extract sample names from the sample files."""
+ return {sample_file.sample_name for sample_file in sample_files}
+
+ def _create_sample_directories(self, sample_names: set[str], delivery_path: Path) -> None:
+ """Create directories for each sample name only if the file name formatter is the NestedSampleFileFormatter.
+ args:
+ sample_names: set[str]: Set of sample names.
+ delivery_path: Path: Path to the delivery directory.
+ """
+ if not isinstance(self.path_name_formatter, NestedStructurePathFormatter):
+ return
+ for sample_name in sample_names:
+ self.file_manager.create_directories(base_path=delivery_path, directories={sample_name})
+
+ def _format_sample_file_paths(self, sample_files: list[SampleFile]) -> list[FormattedFile]:
+ """
+ Return a list of formatted sample files.
+ args:
+ sample_files: The sample files to format
+ """
+ return [
+ FormattedFile(
+ original_path=sample_file.file_path,
+ formatted_path=self.path_name_formatter.format_file_path(
+ file_path=sample_file.file_path,
+ provided_id=sample_file.sample_id,
+ provided_name=sample_file.sample_name,
+ ),
+ )
+ for sample_file in sample_files
+ ]
+
+ def _rename_original_files(self, formatted_files: list[FormattedFile]) -> None:
+ """
+ Rename the original files to their formatted paths.
+ args:
+ formatted_files: list[FormattedFile]: List of formatted files.
+ """
+ LOG.debug("[FORMAT SERVICE] Renaming original files")
+ for formatted_file in formatted_files:
+ self.file_manager.rename_file(
+ src=formatted_file.original_path, dst=formatted_file.formatted_path
+ )
+
+ def _concatenate_fastq_files(
+ self, delivery_path: Path, sample_names: set[str]
+ ) -> dict[Path, Path]:
+ """Concatenate fastq files for each sample and return the forward and reverse concatenated paths.
+ args:
+ delivery_path: Path: Path to the delivery directory.
+ sample_names: set[str]: Set of sample names.
+ returns:
+ dict[Path, Path]: Dictionary with the original fastq file path as key and the concatenated path as value.
+ """
+ LOG.debug(f"[FORMAT SERVICE] delivery_path: {delivery_path}")
+ fastq_files: list[FastqFile] = self._get_unique_sample_fastq_paths(
+ sample_names=sample_names, delivery_path=delivery_path
+ )
+ grouped_fastq_files: dict[str, list[FastqFile]] = self._group_fastq_files_per_sample(
+ sample_names=sample_names, fastq_files=fastq_files
+ )
+ concatenation_maps: dict[Path, Path] = {}
+ for sample in grouped_fastq_files.keys():
+ fastq_directory: Path = grouped_fastq_files[sample][0].fastq_file_path.parent
+ forward_path: Path = generate_concatenated_fastq_delivery_path(
+ fastq_directory=fastq_directory,
+ sample_name=sample,
+ direction=ReadDirection.FORWARD,
+ )
+ reverse_path: Path = generate_concatenated_fastq_delivery_path(
+ fastq_directory=fastq_directory,
+ sample_name=sample,
+ direction=ReadDirection.REVERSE,
+ )
+ self.concatenation_service.concatenate(
+ sample_id=sample,
+ fastq_directory=fastq_directory,
+ forward_output_path=forward_path,
+ reverse_output_path=reverse_path,
+ remove_raw=True,
+ )
+ concatenation_maps.update(
+ self._get_concatenation_map(
+ forward_path=forward_path,
+ reverse_path=reverse_path,
+ fastq_files=grouped_fastq_files[sample],
+ )
+ )
+ return concatenation_maps
+
+ def _get_unique_sample_fastq_paths(
+ self, sample_names: set[str], delivery_path: Path
+ ) -> list[FastqFile]:
+ """
+ Get a list of unique sample fastq file paths given a delivery path.
+ args:
+ sample_names: set[str]: Set of sample names.
+ delivery_path: Path: Path to the delivery directory
+ returns:
+ list[FastqFile]: List of FastqFile objects.
+ """
+ sample_paths: list[FastqFile] = []
+ LOG.debug(
+ f"[CONCATENATION SERVICE] Getting unique sample fastq file paths in {delivery_path}"
+ )
+ list_of_files: list[Path] = get_all_files_in_directory_tree(delivery_path)
+ for sample_name in sample_names:
+ for file in list_of_files:
+ if self._has_expected_sample_name_format_match(
+ sample_name=sample_name, file_path=file
+ ) and self._is_lane_fastq_file(file):
+ LOG.debug(
+ f"[CONCATENATION SERVICE] Found fastq file: {file} for sample: {sample_name}"
+ )
+ sample_paths.append(
+ FastqFile(
+ fastq_file_path=Path(delivery_path, file),
+ sample_name=sample_name,
+ read_direction=self._determine_read_direction(file),
+ )
+ )
+ if not sample_paths:
+ raise FileNotFoundError(
+ f"Could not find any fastq files to concatenate in {delivery_path}."
+ )
+ return sample_paths
+
+ @staticmethod
+ def _has_expected_sample_name_format_match(sample_name: str, file_path: Path) -> bool:
+ """
+ Check if the sample name is an exact match in the file path.
+ Fastq files are expected to have the sample name in the file path formatted as such: _{sample_name}_
+ args:
+ sample_name: str: The sample name to match.
+ file_path: Path: The file path to check.
+ """
+ return f"_{sample_name}_" in file_path.as_posix()
+
+ @staticmethod
+ def _get_concatenation_map(
+ forward_path: Path, reverse_path: Path, fastq_files: list[FastqFile]
+ ) -> dict[Path, Path]:
+ """
+ Get a concatenation map (original fastq path to concatenated path) for a sample.
+ NOTE: the fastq_files must be grouped by sample name.
+ args:
+ forward_path: Path: Path to the forward concatenated file.
+ reverse_path: Path: Path to the reverse concatenated file.
+ fastq_files: list[FastqFile]: List of fastq files for a single sample.
+ """
+ concatenation_map: dict[Path, Path] = {}
+ for fastq_file in fastq_files:
+ concatenation_map[fastq_file.fastq_file_path] = (
+ forward_path if fastq_file.read_direction == ReadDirection.FORWARD else reverse_path
+ )
+ return concatenation_map
+
+ @staticmethod
+ def _determine_read_direction(fastq_path: Path) -> ReadDirection:
+ """Determine the read direction of a fastq file.
+ Assumes that the fastq file path contains 'R1' or 'R2' to determine the read direction.
+ args:
+ fastq_path: Path: Path to the fastq file.
+ """
+ if f"R{ReadDirection.FORWARD}" in fastq_path.as_posix():
+ return ReadDirection.FORWARD
+ return ReadDirection.REVERSE
+
+ def _group_fastq_files_per_sample(
+ self, sample_names: set[str], fastq_files: list[FastqFile]
+ ) -> dict[str, list[FastqFile]]:
+ """Group fastq files per sample.
+ Returns a dictionary with sample names as keys and a list of fastq files as values.
+ args:
+ sample_names: set[str]: Set of sample names.
+ fastq_files: list[FastqFile]: List of fastq files.
+ """
+
+ sample_fastq_files: dict[str, list[FastqFile]] = {
+ sample_name: [] for sample_name in sample_names
+ }
+ for fastq_file in fastq_files:
+ sample_fastq_files[fastq_file.sample_name].append(fastq_file)
+ self._validate_sample_fastq_file_share_same_directory(sample_fastq_files=sample_fastq_files)
+ return sample_fastq_files
+
+ def _replace_fastq_paths(
+ self,
+ concatenation_maps: dict[Path, Path],
+ formatted_files: list[FormattedFile],
+ ) -> None:
+ """
+ Replace the fastq file paths with the new concatenated fastq file paths.
+ Uses the concatenation map with the formatted file path as key and the concatenated path as value.
+ args:
+ concatenation_maps: dict[Path, Path]: Mapping from formatted fastq path to concatenated path.
+ formatted_files: list[FormattedFile]: List of formatted files.
+ """
+ for formatted_file in formatted_files:
+ if self._is_lane_fastq_file(formatted_file.formatted_path):
+ formatted_file.formatted_path = concatenation_maps[formatted_file.formatted_path]
+
+ @staticmethod
+ def _validate_sample_fastq_file_share_same_directory(
+ sample_fastq_files: dict[str, list[FastqFile]]
+ ) -> None:
+ """
+ Assert that all fastq files for a sample share the same directory.
+ This is to ensure that the files are concatenated within the expected directory path.
+ raises: ValueError if the fastq files are not in the same directory.
+ args:
+ sample_fastq_files: dict[str, list[FastqFile]]: Dictionary of sample names and their fastq files.
+ """
+ for sample_name in sample_fastq_files.keys():
+ fastq_files: list[FastqFile] = sample_fastq_files[sample_name]
+ parent_dir: Path = fastq_files[0].fastq_file_path.parent
+ for fastq_file in fastq_files:
+ if fastq_file.fastq_file_path.parent != parent_dir:
+ raise ValueError(
+ f"Sample {sample_name} fastq files are not in the same directory. "
+ f"Cannot concatenate. It will would result in sporadic file paths."
+ )
+
+ @staticmethod
+ def _is_lane_fastq_file(file_path: Path) -> bool:
+ """Check if a fastq file is a from a lane and read direction.
+ Note pattern: *_L[0-9]{3}_R[1-2]_[0-9]{3}.fastq.gz
+ *_ is a wildcard for the flow cell id followed by sample name.
+ L[0-9]{3} is the lane number, i.e. L001, L002 etc.
+ R[1-2] is the read direction, i.e. R1 or R2.
+ [0-9]{3} is the trailing three digits after read direction.
+ args:
+ file_path: Path: Path to the fastq file.
+ """
+
+ pattern = f".*_L[0-9]{{3}}_R[1-2]_[0-9]{{3}}{FileExtensions.FASTQ}{FileExtensions.GZIP}"
+ return (
+ re.fullmatch(
+ pattern=pattern,
+ string=file_path.name,
+ )
+ is not None
+ )
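The lane-file check can be exercised in isolation. The sketch below inlines `.fastq.gz` with escaped dots in place of the `FileExtensions` constants (the constant-based pattern above leaves the dots unescaped, so it is slightly more permissive):

```python
import re

pattern = r".*_L[0-9]{3}_R[1-2]_[0-9]{3}\.fastq\.gz"

# Lane-and-direction files match; file names below are invented examples.
assert re.fullmatch(pattern, "FC1_ACC0001A1_L001_R1_001.fastq.gz")
assert re.fullmatch(pattern, "FC1_ACC0001A1_L002_R2_001.fastq.gz")
# A concatenated file does not match, so it is never re-concatenated.
assert not re.fullmatch(pattern, "ACC0001A1_1.fastq.gz")
```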
diff --git a/cg/services/deliver_files/file_formatter/files/models.py b/cg/services/deliver_files/file_formatter/files/models.py
new file mode 100644
index 0000000000..52c6db156a
--- /dev/null
+++ b/cg/services/deliver_files/file_formatter/files/models.py
@@ -0,0 +1,12 @@
+from pathlib import Path
+from pydantic import BaseModel
+
+from cg.constants.constants import ReadDirection
+
+
+class FastqFile(BaseModel):
+ """A fastq file with a sample name, file path and read direction."""
+
+ sample_name: str
+ fastq_file_path: Path
+ read_direction: ReadDirection
diff --git a/cg/services/deliver_files/file_formatter/files/mutant_service.py b/cg/services/deliver_files/file_formatter/files/mutant_service.py
new file mode 100644
index 0000000000..910a72bd70
--- /dev/null
+++ b/cg/services/deliver_files/file_formatter/files/mutant_service.py
@@ -0,0 +1,147 @@
+import logging
+from pathlib import Path
+import re
+from cg.apps.lims import LimsAPI
+from cg.services.deliver_files.file_fetcher.models import SampleFile
+from cg.services.deliver_files.file_formatter.files.abstract import FileFormatter
+from cg.services.deliver_files.file_formatter.destination.models import FormattedFile
+from cg.services.deliver_files.file_formatter.files.concatenation_service import (
+ SampleFileConcatenationFormatter,
+)
+from cg.services.deliver_files.file_formatter.files.sample_service import FileManager
+
+LOG = logging.getLogger(__name__)
+
+
+class MutantFileFormatter(FileFormatter):
+ """
+ Formatter for files to deliver or upload for the Mutant workflow.
+ Args:
+ lims_api: The LIMS API
+ file_formatter: The SampleFileConcatenationFormatter. This is used to format the files and concatenate the fastq files.
+ file_manager: The FileManager
+
+ """
+
+ def __init__(
+ self,
+ lims_api: LimsAPI,
+ file_formatter: SampleFileConcatenationFormatter,
+ file_manager: FileManager,
+ ):
+ self.lims_api: LimsAPI = lims_api
+ self.file_formatter: SampleFileConcatenationFormatter = file_formatter
+ self.file_manager = file_manager
+
+ def format_files(
+ self, moved_files: list[SampleFile], delivery_path: Path
+ ) -> list[FormattedFile]:
+ """
+ Format the mutant files to deliver and return the formatted files.
+ args:
+ moved_files: The sample files to format
+ delivery_path: The path to deliver the files
+
+ """
+ LOG.debug("[FORMAT SERVICE] Formatting and concatenating mutant files")
+ formatted_files: list[FormattedFile] = self.file_formatter.format_files(
+ moved_files=moved_files, delivery_path=delivery_path
+ )
+ appended_formatted_files: list[FormattedFile] = self._add_lims_metadata_to_file_name(
+ formatted_files=formatted_files, sample_files=moved_files
+ )
+ unique_formatted_files: list[FormattedFile] = self._filter_unique_path_combinations(
+ appended_formatted_files
+ )
+ for unique_files in unique_formatted_files:
+ self.file_manager.rename_file(
+ src=unique_files.original_path, dst=unique_files.formatted_path
+ )
+ return unique_formatted_files
+
+ @staticmethod
+ def _is_concatenated_file(file_path: Path) -> bool:
+ """Check if the file is a concatenated file.
+ Returns True if the file is a concatenated file, otherwise False.
+ regex pattern: .*_[12]\.fastq\.gz
+ .* is the sample id
+ _[12] is the read direction
+ .fastq.gz is the file extension
+ args:
+ file_path: The file path to check
+ """
+ pattern = ".*_[1,2].fastq.gz"
+ return re.fullmatch(pattern, file_path.name) is not None
+
+ def _add_lims_metadata_to_file_name(
+ self, formatted_files: list[FormattedFile], sample_files: list[SampleFile]
+ ) -> list[FormattedFile]:
+ """
+ This function adds the region and lab code to the file names of the formatted files.
+ Note: The region and lab code are fetched from LIMS using the sample id and are required for delivery of the files.
+ This should only be done for concatenated fastq files.
+
+ args:
+ formatted_files: The formatted files to add the metadata to
+ sample_files: The sample files to get the metadata from
+ """
+ appended_formatted_files: list[FormattedFile] = []
+ for formatted_file in formatted_files:
+ if self._is_concatenated_file(formatted_file.formatted_path):
+ sample_id: str = self._get_sample_id_by_original_path(
+ original_path=formatted_file.original_path, sample_files=sample_files
+ )
+ lims_meta_data = self.lims_api.get_sample_region_and_lab_code(sample_id)
+
+ new_original_path: Path = formatted_file.formatted_path
+ new_formatted_path = Path(
+ formatted_file.formatted_path.parent,
+ f"{lims_meta_data}{formatted_file.formatted_path.name}",
+ )
+ appended_formatted_files.append(
+ FormattedFile(
+ original_path=new_original_path, formatted_path=new_formatted_path
+ )
+ )
+ else:
+ appended_formatted_files.append(formatted_file)
+ return appended_formatted_files
+
+ @staticmethod
+ def _get_sample_id_by_original_path(original_path: Path, sample_files: list[SampleFile]) -> str:
+ """Get the sample id by the original path of the sample file.
+ args:
+ original_path: The original path of the sample file
+ sample_files: The list of sample files to search in
+ """
+ for sample_file in sample_files:
+ if sample_file.file_path == original_path:
+ return sample_file.sample_id
+ raise ValueError(f"Could not find sample file with path {original_path}")
+
+ @staticmethod
+ def _filter_unique_path_combinations(
+ formatted_files: list[FormattedFile],
+ ) -> list[FormattedFile]:
+ """
+ Filter out duplicates from the formatted files list.
+
+ note:
+ During fastq concatenation Sample_L1_R1 and Sample_L2_R1 files are concatenated
+ and moved to the same file Concat_Sample. This means that there can be multiple entries
+ for the same concatenated file in the formatted_files list
+ coming from the SampleFileConcatenationFormatter.
+ This function filters out the duplicates to avoid moving the same file multiple times,
+ which would fail the second time since the file is no longer at the original path.
+
+ args:
+ formatted_files: The formatted files to filter
+ """
+ unique_combinations = set()
+ unique_files: list[FormattedFile] = []
+ for formatted_file in formatted_files:
+ combination = (formatted_file.original_path, formatted_file.formatted_path)
+ if combination not in unique_combinations:
+ unique_combinations.add(combination)
+ unique_files.append(formatted_file)
+ return unique_files
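A minimal sketch of the deduplication in `_filter_unique_path_combinations`: after concatenation, several lane entries collapse onto identical `(original_path, formatted_path)` pairs. The `01_SE100_` lab-code prefix below is a made-up example, not the real LIMS format:

```python
from pathlib import Path

# Both lane entries now point at the same concatenated file, so their
# (original_path, formatted_path) pairs are identical.
pairs: list[tuple[Path, Path]] = [
    (Path("concat_1.fastq.gz"), Path("01_SE100_concat_1.fastq.gz")),
    (Path("concat_1.fastq.gz"), Path("01_SE100_concat_1.fastq.gz")),  # duplicate
    (Path("concat_2.fastq.gz"), Path("01_SE100_concat_2.fastq.gz")),
]

seen: set[tuple[Path, Path]] = set()
unique: list[tuple[Path, Path]] = []
for pair in pairs:
    if pair not in seen:
        seen.add(pair)
        unique.append(pair)

assert len(unique) == 2  # each file is renamed exactly once
```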
diff --git a/cg/services/deliver_files/file_formatter/files/sample_service.py b/cg/services/deliver_files/file_formatter/files/sample_service.py
new file mode 100644
index 0000000000..276a3b2649
--- /dev/null
+++ b/cg/services/deliver_files/file_formatter/files/sample_service.py
@@ -0,0 +1,71 @@
+import logging
+from pathlib import Path
+
+from cg.services.deliver_files.file_fetcher.models import SampleFile
+from cg.services.deliver_files.file_formatter.files.abstract import FileFormatter
+from cg.services.deliver_files.file_formatter.destination.models import FormattedFile
+from cg.services.deliver_files.file_formatter.path_name.abstract import PathNameFormatter
+from cg.services.deliver_files.utils import FileManager
+
+LOG = logging.getLogger(__name__)
+
+
+class SampleFileFormatter(FileFormatter):
+ """
+ Format the sample files to deliver.
+ Used for all workflows except Microsalt and Mutant.
+ args:
+ file_manager: The file manager
+ path_name_formatter: The path name formatter to format paths to either a flat or nested structure in the delivery destination
+ """
+
+ def __init__(
+ self,
+ file_manager: FileManager,
+ path_name_formatter: PathNameFormatter,
+ ):
+ self.file_manager = file_manager
+ self.path_name_formatter = path_name_formatter
+
+ def format_files(
+ self, moved_files: list[SampleFile], delivery_path: Path
+ ) -> list[FormattedFile]:
+ """
+ Format the sample files to deliver and return the formatted files.
+ args:
+ moved_files: The sample files to format. These are files that have been moved from housekeeper to the delivery path.
+ delivery_path: The path to deliver the files to
+ """
+ LOG.debug("[FORMAT SERVICE] Formatting sample files")
+ sample_names: set[str] = self._get_sample_names(sample_files=moved_files)
+ for sample_name in sample_names:
+ self.file_manager.create_directories(base_path=delivery_path, directories={sample_name})
+ formatted_files: list[FormattedFile] = self._format_sample_file_paths(moved_files)
+ for formatted_file in formatted_files:
+ self.file_manager.rename_file(
+ src=formatted_file.original_path, dst=formatted_file.formatted_path
+ )
+ return formatted_files
+
+ @staticmethod
+ def _get_sample_names(sample_files: list[SampleFile]) -> set[str]:
+ """Extract sample names from the sample files."""
+ return {sample_file.sample_name for sample_file in sample_files}
+
+ def _format_sample_file_paths(self, sample_files: list[SampleFile]) -> list[FormattedFile]:
+ """
+ Return a list of formatted sample files.
+ args:
+ sample_files: The sample files to format
+ """
+ return [
+ FormattedFile(
+ original_path=sample_file.file_path,
+ formatted_path=self.path_name_formatter.format_file_path(
+ file_path=sample_file.file_path,
+ provided_id=sample_file.sample_id,
+ provided_name=sample_file.sample_name,
+ ),
+ )
+ for sample_file in sample_files
+ ]
diff --git a/cg/services/deliver_files/file_formatter/path_name/__init__.py b/cg/services/deliver_files/file_formatter/path_name/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/deliver_files/file_formatter/path_name/abstract.py b/cg/services/deliver_files/file_formatter/path_name/abstract.py
new file mode 100644
index 0000000000..5fc0409843
--- /dev/null
+++ b/cg/services/deliver_files/file_formatter/path_name/abstract.py
@@ -0,0 +1,13 @@
+from abc import abstractmethod, ABC
+from pathlib import Path
+
+
+class PathNameFormatter(ABC):
+ """
+ Abstract class that encapsulates the logic required for formatting the path name.
+ """
+
+ @abstractmethod
+ def format_file_path(self, file_path: Path, provided_id: str, provided_name: str) -> Path:
+ """Format the file path."""
+ pass
diff --git a/cg/services/deliver_files/file_formatter/path_name/flat_structure.py b/cg/services/deliver_files/file_formatter/path_name/flat_structure.py
new file mode 100644
index 0000000000..f851a6bf7b
--- /dev/null
+++ b/cg/services/deliver_files/file_formatter/path_name/flat_structure.py
@@ -0,0 +1,24 @@
+from pathlib import Path
+
+from cg.services.deliver_files.file_formatter.files.sample_service import LOG
+from cg.services.deliver_files.file_formatter.path_name.abstract import PathNameFormatter
+
+
+class FlatStructurePathFormatter(PathNameFormatter):
+ """
+ Class to format sample file names in place.
+ """
+
+ def format_file_path(self, file_path: Path, provided_id: str, provided_name: str) -> Path:
+ """
+ Return the formatted file path:
+ Replaces the provided id with the provided name, keeping the file in its directory.
+ args:
+ file_path: The path to the file
+ provided_id: The id to replace
+ provided_name: The name to replace the id with
+ """
+ LOG.debug("[FORMAT SERVICE] Formatting sample file names with flat structure.")
+ replaced_name = file_path.name.replace(provided_id, provided_name)
+ formatted_path = Path(file_path.parent, replaced_name)
+ return formatted_path
diff --git a/cg/services/deliver_files/file_formatter/path_name/nested_structure.py b/cg/services/deliver_files/file_formatter/path_name/nested_structure.py
new file mode 100644
index 0000000000..26ede0ea99
--- /dev/null
+++ b/cg/services/deliver_files/file_formatter/path_name/nested_structure.py
@@ -0,0 +1,26 @@
+from pathlib import Path
+
+from cg.services.deliver_files.file_formatter.files.sample_service import LOG
+from cg.services.deliver_files.file_formatter.path_name.abstract import PathNameFormatter
+
+
+class NestedStructurePathFormatter(PathNameFormatter):
+ """
+ Class to format sample file names and paths in a nested format used to deliver files to a customer inbox.
+ """
+
+ def format_file_path(self, file_path: Path, provided_id: str, provided_name: str) -> Path:
+ """
+ Return the formatted file path:
+ 1. Adds a folder with the provided name to the path of the file.
+ 2. Replaces the provided id with the provided name.
+
+ args:
+ file_path: The path to the file
+ provided_id: The id to replace
+ provided_name: The name to replace the id with
+ """
+ LOG.debug("[FORMAT SERVICE] Formatting sample file names with nested structure.")
+ replaced_name = file_path.name.replace(provided_id, provided_name)
+ formatted_path = Path(file_path.parent, provided_name, replaced_name)
+ return formatted_path
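The two path-name strategies differ only in whether a folder named after the sample or case is injected into the path. Illustrative input and outputs, assuming the behavior defined above:

```python
from pathlib import Path

file_path = Path("/delivery/ACC0001A1_R1.fastq.gz")
provided_id, provided_name = "ACC0001A1", "sample1"

replaced_name = file_path.name.replace(provided_id, provided_name)

# Flat: file stays in place, only the name changes.
flat = Path(file_path.parent, replaced_name)
# Nested: a folder with the provided name is added to the path.
nested = Path(file_path.parent, provided_name, replaced_name)

assert flat == Path("/delivery/sample1_R1.fastq.gz")
assert nested == Path("/delivery/sample1/sample1_R1.fastq.gz")
```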
diff --git a/cg/services/deliver_files/file_formatter/utils/case_service.py b/cg/services/deliver_files/file_formatter/utils/case_service.py
deleted file mode 100644
index ccc4f656e6..0000000000
--- a/cg/services/deliver_files/file_formatter/utils/case_service.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import os
-from pathlib import Path
-
-from cg.services.deliver_files.file_fetcher.models import CaseFile
-from cg.services.deliver_files.file_formatter.models import FormattedFile
-
-
-class CaseFileFormatter:
-
- def format_files(
- self, moved_files: list[CaseFile], ticket_dir_path: Path
- ) -> list[FormattedFile]:
- """Format the case files to deliver and return the formatted files."""
- self._create_case_name_folder(
- ticket_path=ticket_dir_path, case_name=moved_files[0].case_name
- )
- return self._format_case_files(moved_files)
-
- def _format_case_files(self, case_files: list[CaseFile]) -> list[FormattedFile]:
- formatted_files: list[FormattedFile] = self._get_formatted_files(case_files)
- for formatted_file in formatted_files:
- os.rename(src=formatted_file.original_path, dst=formatted_file.formatted_path)
- return formatted_files
-
- @staticmethod
- def _create_case_name_folder(ticket_path: Path, case_name: str) -> None:
- case_dir_path = Path(ticket_path, case_name)
- case_dir_path.mkdir(exist_ok=True)
-
- @staticmethod
- def _get_formatted_files(case_files: list[CaseFile]) -> list[FormattedFile]:
- """
- Returns formatted files:
- 1. Adds a folder with case name to the path of the case files.
- 2. Replaces case id by case name.
- """
- formatted_files: list[FormattedFile] = []
- for case_file in case_files:
- replaced_case_file_name: str = case_file.file_path.name.replace(
- case_file.case_id, case_file.case_name
- )
- formatted_file_path = Path(
- case_file.file_path.parent, case_file.case_name, replaced_case_file_name
- )
- formatted_files.append(
- FormattedFile(original_path=case_file.file_path, formatted_path=formatted_file_path)
- )
- return formatted_files
diff --git a/cg/services/deliver_files/file_formatter/utils/sample_concatenation_service.py b/cg/services/deliver_files/file_formatter/utils/sample_concatenation_service.py
deleted file mode 100644
index c7eaea6b63..0000000000
--- a/cg/services/deliver_files/file_formatter/utils/sample_concatenation_service.py
+++ /dev/null
@@ -1,128 +0,0 @@
-from pathlib import Path
-
-from cg.constants.constants import ReadDirection, FileFormat, FileExtensions
-
-from cg.services.fastq_concatenation_service.fastq_concatenation_service import (
- FastqConcatenationService,
-)
-from cg.services.fastq_concatenation_service.utils import generate_concatenated_fastq_delivery_path
-from cg.services.deliver_files.file_fetcher.models import SampleFile
-from cg.services.deliver_files.file_formatter.models import FormattedFile
-from cg.services.deliver_files.file_formatter.utils.sample_service import (
- SampleFileNameFormatter,
- FileManagingService,
-)
-
-
-class SampleFileConcatenationFormatter:
- """
- Format the sample files to deliver, concatenate fastq files and return the formatted files.
- Used for workflows: Microsalt.
- """
-
- def __init__(
- self,
- file_manager: FileManagingService,
- file_formatter: SampleFileNameFormatter,
- concatenation_service: FastqConcatenationService,
- ):
- self.file_manager = file_manager
- self.file_name_formatter = file_formatter
- self.concatenation_service = concatenation_service
-
- def format_files(
- self, moved_files: list[SampleFile], ticket_dir_path: Path
- ) -> list[FormattedFile]:
- """Format the sample files to deliver, concatenate fastq files and return the formatted files."""
- sample_names: set[str] = self.file_name_formatter.get_sample_names(sample_files=moved_files)
- for sample_name in sample_names:
- self.file_manager.create_directories(
- base_path=ticket_dir_path, directories={sample_name}
- )
- formatted_files: list[FormattedFile] = self.file_name_formatter.format_sample_file_names(
- sample_files=moved_files
- )
- for formatted_file in formatted_files:
- self.file_manager.rename_file(
- src=formatted_file.original_path, dst=formatted_file.formatted_path
- )
- forward_paths, reverse_path = self._concatenate_fastq_files(formatted_files=formatted_files)
- self._replace_fastq_paths(
- reverse_paths=reverse_path,
- forward_paths=forward_paths,
- formatted_files=formatted_files,
- )
- return formatted_files
-
- def _concatenate_fastq_files(
- self, formatted_files: list[FormattedFile]
- ) -> tuple[list[Path], list[Path]]:
- unique_sample_dir_paths: set[Path] = self._get_unique_sample_paths(
- sample_files=formatted_files
- )
- forward_paths: list[Path] = []
- reverse_paths: list[Path] = []
- for fastq_directory in unique_sample_dir_paths:
- sample_name: str = fastq_directory.name
-
- forward_path: Path = generate_concatenated_fastq_delivery_path(
- fastq_directory=fastq_directory,
- sample_name=sample_name,
- direction=ReadDirection.FORWARD,
- )
- forward_paths.append(forward_path)
- reverse_path: Path = generate_concatenated_fastq_delivery_path(
- fastq_directory=fastq_directory,
- sample_name=sample_name,
- direction=ReadDirection.REVERSE,
- )
- reverse_paths.append(reverse_path)
- self.concatenation_service.concatenate(
- fastq_directory=fastq_directory,
- forward_output_path=forward_path,
- reverse_output_path=reverse_path,
- remove_raw=True,
- )
- return forward_paths, reverse_paths
-
- @staticmethod
- def _get_unique_sample_paths(sample_files: list[FormattedFile]) -> set[Path]:
- sample_paths: list[Path] = []
- for sample_file in sample_files:
- sample_paths.append(sample_file.formatted_path.parent)
- return set(sample_paths)
-
- @staticmethod
- def _replace_fastq_formatted_file_path(
- formatted_files: list[FormattedFile],
- direction: ReadDirection,
- new_path: Path,
- ) -> None:
- """Replace the formatted file path with the new path."""
- for formatted_file in formatted_files:
- if (
- formatted_file.formatted_path.parent == new_path.parent
- and f"{FileFormat.FASTQ}{FileExtensions.GZIP}" in formatted_file.formatted_path.name
- and f"R{direction}" in formatted_file.formatted_path.name
- ):
- formatted_file.formatted_path = new_path
-
- def _replace_fastq_paths(
- self,
- forward_paths: list[Path],
- reverse_paths: list[Path],
- formatted_files: list[FormattedFile],
- ) -> None:
- """Replace the fastq file paths with the new concatenated fastq file paths."""
- for forward_path in forward_paths:
- self._replace_fastq_formatted_file_path(
- formatted_files=formatted_files,
- direction=ReadDirection.FORWARD,
- new_path=forward_path,
- )
- for reverse_path in reverse_paths:
- self._replace_fastq_formatted_file_path(
- formatted_files=formatted_files,
- direction=ReadDirection.REVERSE,
- new_path=reverse_path,
- )
diff --git a/cg/services/deliver_files/file_formatter/utils/sample_service.py b/cg/services/deliver_files/file_formatter/utils/sample_service.py
deleted file mode 100644
index 8efc383d1c..0000000000
--- a/cg/services/deliver_files/file_formatter/utils/sample_service.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import os
-from pathlib import Path
-from cg.services.deliver_files.file_fetcher.models import SampleFile
-from cg.services.deliver_files.file_formatter.models import FormattedFile
-
-
-class FileManagingService:
- """
- Service to manage files.
- Handles operations that create or rename files and directories.
- """
-
- @staticmethod
- def create_directories(base_path: Path, directories: set[str]) -> None:
- """Create directories for given names under the base path."""
- for directory in directories:
- Path(base_path, directory).mkdir(exist_ok=True)
-
- @staticmethod
- def rename_file(src: Path, dst: Path) -> None:
- """Rename a file from src to dst."""
- os.rename(src, dst)
-
-
-class SampleFileNameFormatter:
- """
- Class to format sample file names.
- """
-
- @staticmethod
- def get_sample_names(sample_files: list[SampleFile]) -> set[str]:
- """Extract sample names from the sample files."""
- return {sample_file.sample_name for sample_file in sample_files}
-
- @staticmethod
- def format_sample_file_names(sample_files: list[SampleFile]) -> list[FormattedFile]:
- """
- Returns formatted files with original and formatted file names:
- 1. Adds a folder with sample name to the path of the sample files.
- 2. Replaces sample id by sample name.
- """
- formatted_files = []
- for sample_file in sample_files:
- replaced_name = sample_file.file_path.name.replace(
- sample_file.sample_id, sample_file.sample_name
- )
- formatted_path = Path(
- sample_file.file_path.parent, sample_file.sample_name, replaced_name
- )
- formatted_files.append(
- FormattedFile(original_path=sample_file.file_path, formatted_path=formatted_path)
- )
- return formatted_files
-
-
-class SampleFileFormatter:
- """
- Format the sample files to deliver.
- Used for all workflows except Microsalt and Mutant.
- """
-
- def __init__(
- self, file_manager: FileManagingService, file_name_formatter: SampleFileNameFormatter
- ):
- self.file_manager = file_manager
- self.file_name_formatter = file_name_formatter
-
- def format_files(
- self, moved_files: list[SampleFile], ticket_dir_path: Path
- ) -> list[FormattedFile]:
- """Format the sample files to deliver and return the formatted files."""
- sample_names: set[str] = self.file_name_formatter.get_sample_names(sample_files=moved_files)
- for sample_name in sample_names:
- self.file_manager.create_directories(
- base_path=ticket_dir_path, directories={sample_name}
- )
- formatted_files: list[FormattedFile] = self.file_name_formatter.format_sample_file_names(
- sample_files=moved_files
- )
- for formatted_file in formatted_files:
- self.file_manager.rename_file(
- src=formatted_file.original_path, dst=formatted_file.formatted_path
- )
- return formatted_files
diff --git a/cg/services/deliver_files/file_mover/abstract.py b/cg/services/deliver_files/file_mover/abstract.py
new file mode 100644
index 0000000000..54bf7638a6
--- /dev/null
+++ b/cg/services/deliver_files/file_mover/abstract.py
@@ -0,0 +1,11 @@
+from abc import ABC, abstractmethod
+from pathlib import Path
+
+from cg.services.deliver_files.file_fetcher.models import DeliveryFiles
+
+
+class DestinationFilesMover(ABC):
+ @abstractmethod
+ def move_files(self, delivery_files: DeliveryFiles, delivery_base_path: Path) -> DeliveryFiles:
+ """Move files to the delivery folder."""
+ pass
diff --git a/cg/services/deliver_files/file_mover/base_service.py b/cg/services/deliver_files/file_mover/base_service.py
new file mode 100644
index 0000000000..67ad1827b1
--- /dev/null
+++ b/cg/services/deliver_files/file_mover/base_service.py
@@ -0,0 +1,30 @@
+from pathlib import Path
+
+from cg.services.deliver_files.file_fetcher.models import DeliveryFiles, SampleFile, CaseFile
+from cg.services.deliver_files.file_mover.abstract import DestinationFilesMover
+from cg.services.deliver_files.utils import FileMover
+
+
+class BaseDestinationFilesMover(DestinationFilesMover):
+ """
+ Class to move files directly to the delivery base path.
+ """
+
+ def __init__(self, file_mover: FileMover):
+ self.file_mover = file_mover
+
+ def move_files(self, delivery_files: DeliveryFiles, delivery_base_path: Path) -> DeliveryFiles:
+ """
+ Move the files directly to the delivery base path.
+ args:
+ delivery_files: DeliveryFiles: The files to move.
+ delivery_base_path: Path: The path to move the files to.
+ """
+ delivery_files.delivery_data.delivery_path = delivery_base_path
+ delivery_files.case_files = self.file_mover.move_and_update_files(
+ file_models=delivery_files.case_files, target_dir=delivery_base_path
+ )
+ delivery_files.sample_files = self.file_mover.move_and_update_files(
+ file_models=delivery_files.sample_files, target_dir=delivery_base_path
+ )
+ return delivery_files
diff --git a/cg/services/deliver_files/file_mover/customer_inbox_service.py b/cg/services/deliver_files/file_mover/customer_inbox_service.py
new file mode 100644
index 0000000000..d613bca4bf
--- /dev/null
+++ b/cg/services/deliver_files/file_mover/customer_inbox_service.py
@@ -0,0 +1,61 @@
+import logging
+from pathlib import Path
+
+from cg.constants.delivery import INBOX_NAME
+from cg.services.deliver_files.file_fetcher.models import (
+ DeliveryFiles,
+ DeliveryMetaData,
+)
+from cg.services.deliver_files.file_mover.abstract import DestinationFilesMover
+from cg.services.deliver_files.utils import FileMover
+
+LOG = logging.getLogger(__name__)
+
+
+class CustomerInboxDestinationFilesMover(DestinationFilesMover):
+ """
+ Class to move files to the customer folder.
+ """
+
+ def __init__(self, file_mover: FileMover):
+ self.file_mover = file_mover
+
+ def move_files(self, delivery_files: DeliveryFiles, delivery_base_path: Path) -> DeliveryFiles:
+ """
+ Move the files to the customer folder.
+ args:
+ delivery_files: DeliveryFiles: The files to move.
+ delivery_base_path: Path: The path to move the files to.
+ """
+ inbox_ticket_dir_path: Path = self._create_ticket_inbox_dir_path(
+ delivery_base_path=delivery_base_path, delivery_data=delivery_files.delivery_data
+ )
+ delivery_files.delivery_data.delivery_path = inbox_ticket_dir_path
+
+ self.file_mover.create_directories(
+ base_path=delivery_base_path,
+ directories={str(inbox_ticket_dir_path.relative_to(delivery_base_path))},
+ )
+ delivery_files.case_files = self.file_mover.move_and_update_files(
+ file_models=delivery_files.case_files, target_dir=inbox_ticket_dir_path
+ )
+ delivery_files.sample_files = self.file_mover.move_and_update_files(
+ file_models=delivery_files.sample_files, target_dir=inbox_ticket_dir_path
+ )
+ return delivery_files
+
+ @staticmethod
+ def _create_ticket_inbox_dir_path(
+ delivery_base_path: Path, delivery_data: DeliveryMetaData
+ ) -> Path:
+ """Generate the path to the ticket inbox directory.
+ args:
+ delivery_base_path: The base path to the delivery folder.
+ delivery_data: The delivery data containing the customer internal id and ticket id.
+ """
+ return Path(
+ delivery_base_path,
+ delivery_data.customer_internal_id,
+ INBOX_NAME,
+ delivery_data.ticket_id,
+ )
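A sketch of the inbox layout produced by `_create_ticket_inbox_dir_path`, assuming `INBOX_NAME` is `"inbox"`; the customer id and ticket below are invented:

```python
from pathlib import Path

INBOX_NAME = "inbox"  # assumed value of cg.constants.delivery.INBOX_NAME

delivery_base_path = Path("/home/delivery")
customer_internal_id = "cust001"
ticket_id = "123456"

inbox_ticket_dir = Path(delivery_base_path, customer_internal_id, INBOX_NAME, ticket_id)
assert inbox_ticket_dir == Path("/home/delivery/cust001/inbox/123456")

# The mover then creates this directory relative to the base path:
relative_dir = str(inbox_ticket_dir.relative_to(delivery_base_path))
assert relative_dir == "cust001/inbox/123456"
```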
diff --git a/cg/services/deliver_files/file_mover/service.py b/cg/services/deliver_files/file_mover/service.py
deleted file mode 100644
index d02d55d6be..0000000000
--- a/cg/services/deliver_files/file_mover/service.py
+++ /dev/null
@@ -1,111 +0,0 @@
-import logging
-from pathlib import Path
-
-from cg.constants.delivery import INBOX_NAME
-from cg.services.deliver_files.file_fetcher.models import (
- CaseFile,
- DeliveryFiles,
- DeliveryMetaData,
- SampleFile,
-)
-from cg.utils.files import link_or_overwrite_file
-
-LOG = logging.getLogger(__name__)
-
-
-class DeliveryFilesMover:
- """
- Class that encapsulates the logic required for moving files to the customer folder.
- """
-
- def move_files(self, delivery_files: DeliveryFiles, delivery_base_path: Path) -> DeliveryFiles:
- """Move the files to the customer folder."""
- inbox_ticket_dir_path: Path = self._create_ticket_inbox_dir_path(
- delivery_base_path=delivery_base_path, delivery_data=delivery_files.delivery_data
- )
- delivery_files.delivery_data.customer_ticket_inbox = inbox_ticket_dir_path
- self._create_ticket_inbox_folder(inbox_ticket_dir_path)
- self._create_hard_links_for_delivery_files(
- delivery_files=delivery_files, inbox_dir_path=inbox_ticket_dir_path
- )
- return self._replace_file_paths_with_inbox_dir_paths(
- delivery_files=delivery_files, inbox_dir_path=inbox_ticket_dir_path
- )
-
- @staticmethod
- def _create_ticket_inbox_folder(
- inbox_ticket_dir_path: Path,
- ) -> Path:
- """Create a ticket inbox folder in the customer folder, overwrites if already present."""
- LOG.debug(f"[MOVE SERVICE] Creating ticket inbox folder: {inbox_ticket_dir_path}")
- inbox_ticket_dir_path.mkdir(parents=True, exist_ok=True)
- return inbox_ticket_dir_path
-
- @staticmethod
- def _create_ticket_inbox_dir_path(
- delivery_base_path: Path, delivery_data: DeliveryMetaData
- ) -> Path:
- """Create the path to the ticket inbox folder."""
- return Path(
- delivery_base_path,
- delivery_data.customer_internal_id,
- INBOX_NAME,
- delivery_data.ticket_id,
- )
-
- @staticmethod
- def _create_inbox_file_path(file_path: Path, inbox_dir_path: Path) -> Path:
- """Create the path to the inbox file."""
- return Path(inbox_dir_path, file_path.name)
-
- def _create_hard_link_file_paths(
- self, file_models: list[SampleFile | CaseFile], inbox_dir_path: Path
- ) -> None:
- """Create hard links to the sample files in the customer folder."""
- for file_model in file_models:
- inbox_file_path: Path = self._create_inbox_file_path(
- file_path=file_model.file_path, inbox_dir_path=inbox_dir_path
- )
- link_or_overwrite_file(src=file_model.file_path, dst=inbox_file_path)
-
- def _create_hard_links_for_delivery_files(
- self, delivery_files: DeliveryFiles, inbox_dir_path: Path
- ) -> None:
- """Create hard links to the files in the customer folder."""
- LOG.debug(f"[MOVE SERVICE] Creating hard links for delivery files in: {inbox_dir_path}")
- if delivery_files.case_files:
- self._create_hard_link_file_paths(
- file_models=delivery_files.case_files, inbox_dir_path=inbox_dir_path
- )
- self._create_hard_link_file_paths(
- file_models=delivery_files.sample_files, inbox_dir_path=inbox_dir_path
- )
-
- def _replace_file_path_with_inbox_dir_path(
- self, file_models: list[SampleFile | CaseFile], inbox_dir_path: Path
- ) -> list[SampleFile | CaseFile]:
- """Replace the file path with the inbox path."""
- for file_model in file_models:
- inbox_file_path: Path = self._create_inbox_file_path(
- file_path=file_model.file_path, inbox_dir_path=inbox_dir_path
- )
- file_model.file_path = inbox_file_path
- return file_models
-
- def _replace_file_paths_with_inbox_dir_paths(
- self,
- delivery_files: DeliveryFiles,
- inbox_dir_path: Path,
- ) -> DeliveryFiles:
- """
-        Replace the original file paths in the delivery files with the customer inbox file paths.
- """
- LOG.debug(f"[MOVE SERVICE] Replacing file paths with inbox dir path: {inbox_dir_path}")
- if delivery_files.case_files:
- delivery_files.case_files = self._replace_file_path_with_inbox_dir_path(
- file_models=delivery_files.case_files, inbox_dir_path=inbox_dir_path
- )
- delivery_files.sample_files = self._replace_file_path_with_inbox_dir_path(
- file_models=delivery_files.sample_files, inbox_dir_path=inbox_dir_path
- )
- return delivery_files
diff --git a/cg/services/deliver_files/rsync/service.py b/cg/services/deliver_files/rsync/service.py
index 3fa6194e46..5b5940a95f 100644
--- a/cg/services/deliver_files/rsync/service.py
+++ b/cg/services/deliver_files/rsync/service.py
@@ -11,19 +11,18 @@
from cg.constants import Workflow
from cg.constants.constants import FileFormat
from cg.constants.delivery import INBOX_NAME
-from cg.constants.priority import SlurmAccount, SlurmQos
-from cg.constants.tb import AnalysisTypes
+from cg.constants.priority import SlurmAccount, SlurmQos, TrailblazerPriority
+from cg.constants.tb import AnalysisType
from cg.exc import CgError
from cg.io.controller import WriteFile
+from cg.models.slurm.sbatch import Sbatch
from cg.services.deliver_files.rsync.models import RsyncDeliveryConfig
from cg.services.deliver_files.rsync.sbatch import (
+ COVID_REPORT_RSYNC,
COVID_RSYNC,
ERROR_RSYNC_FUNCTION,
RSYNC_COMMAND,
- COVID_REPORT_RSYNC,
)
-
-from cg.models.slurm.sbatch import Sbatch
from cg.store.models import Case
from cg.store.store import Store
@@ -53,6 +52,15 @@ def slurm_quality_of_service(self) -> str:
"""Return the slurm quality of service depending on the slurm account."""
return SlurmQos.HIGH if self.account == SlurmAccount.PRODUCTION else SlurmQos.LOW
+ @property
+ def trailblazer_priority(self) -> TrailblazerPriority:
+ """Return the trailblazer priority depending on the slurm account."""
+ return (
+ TrailblazerPriority.HIGH
+ if self.account == SlurmAccount.PRODUCTION
+ else TrailblazerPriority.LOW
+ )
+
@property
def trailblazer_config_path(self) -> Path:
"""Return Path to trailblazer config."""
@@ -159,10 +167,10 @@ def add_to_trailblazer_api(
)
tb_api.add_pending_analysis(
case_id=ticket,
- analysis_type=AnalysisTypes.OTHER,
+ analysis_type=AnalysisType.OTHER,
config_path=self.trailblazer_config_path.as_posix(),
out_dir=self.log_dir.as_posix(),
- slurm_quality_of_service=self.slurm_quality_of_service,
+ priority=self.trailblazer_priority,
email=self.mail_user,
workflow=Workflow.RSYNC,
ticket=ticket,
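
The new `trailblazer_priority` property mirrors `slurm_quality_of_service`: production accounts map to HIGH and everything else to LOW. A minimal standalone sketch of that mapping, using stand-in enums since the real members of `SlurmAccount` and `TrailblazerPriority` are not shown in this diff:

```python
from enum import StrEnum


class SlurmAccount(StrEnum):  # stand-in for cg.constants.priority.SlurmAccount
    PRODUCTION = "production"
    DEVELOPMENT = "development"


class TrailblazerPriority(StrEnum):  # stand-in for cg.constants.priority.TrailblazerPriority
    HIGH = "high"
    LOW = "low"


def trailblazer_priority(account: SlurmAccount) -> TrailblazerPriority:
    """Return HIGH for production accounts and LOW for all others."""
    return (
        TrailblazerPriority.HIGH
        if account == SlurmAccount.PRODUCTION
        else TrailblazerPriority.LOW
    )


assert trailblazer_priority(SlurmAccount.PRODUCTION) is TrailblazerPriority.HIGH
assert trailblazer_priority(SlurmAccount.DEVELOPMENT) is TrailblazerPriority.LOW
```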
diff --git a/cg/services/deliver_files/tag_fetcher/bam_service.py b/cg/services/deliver_files/tag_fetcher/bam_service.py
index 571cf265df..6abf3a2830 100644
--- a/cg/services/deliver_files/tag_fetcher/bam_service.py
+++ b/cg/services/deliver_files/tag_fetcher/bam_service.py
@@ -14,7 +14,12 @@ class BamDeliveryTagsFetcher(FetchDeliveryFileTagsService):
@handle_tag_errors
def fetch_tags(self, workflow: Workflow) -> DeliveryFileTags:
- """Fetch the tags for the bam files to deliver."""
+ """
+ Fetch the tags for the bam files to deliver.
+ Hardcoded to only return the BAM tag.
+ args:
+ workflow: The workflow to fetch tags for
+ """
self._validate_workflow(workflow=workflow)
return DeliveryFileTags(
case_tags=None,
diff --git a/cg/services/deliver_files/tag_fetcher/fohm_upload_service.py b/cg/services/deliver_files/tag_fetcher/fohm_upload_service.py
new file mode 100644
index 0000000000..e0a42e393f
--- /dev/null
+++ b/cg/services/deliver_files/tag_fetcher/fohm_upload_service.py
@@ -0,0 +1,47 @@
+from cg.constants import Workflow, SequencingFileTag
+from cg.services.deliver_files.tag_fetcher.abstract import FetchDeliveryFileTagsService
+from cg.services.deliver_files.tag_fetcher.error_handling import handle_tag_errors
+from cg.services.deliver_files.tag_fetcher.models import DeliveryFileTags
+
+
+class FOHMUploadTagsFetcher(FetchDeliveryFileTagsService):
+ """Class to fetch tags for FOHM upload files."""
+
+ @handle_tag_errors
+ def fetch_tags(self, workflow: Workflow) -> DeliveryFileTags:
+ """
+        Fetch the tags for the FOHM upload files to deliver.
+        NOTE: the RAW_DATA workflow is required here to fit the implementation of the raw data delivery file fetcher.
+        If the workflow is MUTANT, return tags for consensus-sample and vcf-report to fetch sample files from the case bundle.
+        If the workflow is RAW_DATA, return tags for fastq to fetch fastq files from the sample bundle.
+        The split is required since some sample-specific files are stored in the case bundle,
+        which can also hold fastq files; without the split, case-bundle fastq files would be fetched too.
+
+        Hardcoded to return only the tags listed above.
+        args:
+            workflow: The workflow to fetch tags for
+ """
+ self._validate_workflow(workflow=workflow)
+ return (
+ DeliveryFileTags(
+ case_tags=None,
+ sample_tags=[{"consensus-sample"}, {"vcf-report"}],
+ )
+ if workflow == Workflow.MUTANT
+ else DeliveryFileTags(
+ case_tags=None,
+ sample_tags=[{SequencingFileTag.FASTQ}],
+ )
+ )
+
+ @staticmethod
+ def _validate_workflow(workflow: Workflow):
+ """
+ Validate the workflow.
+        NOTE: the RAW_DATA workflow is accepted to fit the implementation of the raw data delivery file fetcher.
+        args:
+            workflow: The workflow to validate.
+ """
+ if workflow not in [Workflow.MUTANT, Workflow.RAW_DATA]:
+ raise ValueError(f"Workflow {workflow} is not supported for FOHM upload file delivery.")
+ return workflow
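
To make the branching above concrete: MUTANT orders fetch sample files from the case bundle by the consensus-sample and vcf-report tags, while RAW_DATA orders fetch fastq files from the sample bundle. A self-contained sketch, with stand-ins for the real `Workflow` enum and `DeliveryFileTags` model:

```python
from dataclasses import dataclass
from enum import StrEnum


class Workflow(StrEnum):  # stand-in for cg.constants.Workflow
    MUTANT = "mutant"
    RAW_DATA = "raw-data"
    BALSAMIC = "balsamic"


@dataclass
class DeliveryFileTags:  # stand-in for the model in tag_fetcher/models.py
    case_tags: list[set[str]] | None
    sample_tags: list[set[str]]


def fetch_fohm_tags(workflow: Workflow) -> DeliveryFileTags:
    """Return MUTANT case-bundle tags or RAW_DATA fastq tags; reject anything else."""
    if workflow not in (Workflow.MUTANT, Workflow.RAW_DATA):
        raise ValueError(f"Workflow {workflow} is not supported for FOHM upload file delivery.")
    if workflow == Workflow.MUTANT:
        return DeliveryFileTags(case_tags=None, sample_tags=[{"consensus-sample"}, {"vcf-report"}])
    return DeliveryFileTags(case_tags=None, sample_tags=[{"fastq"}])


print(fetch_fohm_tags(Workflow.MUTANT).sample_tags)    # [{'consensus-sample'}, {'vcf-report'}]
print(fetch_fohm_tags(Workflow.RAW_DATA).sample_tags)  # [{'fastq'}]
```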
diff --git a/cg/services/deliver_files/tag_fetcher/models.py b/cg/services/deliver_files/tag_fetcher/models.py
index 580e95c663..791b7b767e 100644
--- a/cg/services/deliver_files/tag_fetcher/models.py
+++ b/cg/services/deliver_files/tag_fetcher/models.py
@@ -2,5 +2,11 @@
class DeliveryFileTags(BaseModel):
+ """
+ Model to hold the tags for the files to deliver.
+ case_tags: The tags for the case files to deliver
+ sample_tags: The tags for the sample files to deliver
+ """
+
case_tags: list[set[str]] | None
sample_tags: list[set[str]]
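
The annotations make the contract explicit: case tags are optional (None for deliveries with no case-level files), while sample tags are always required. A minimal construction sketch, assuming a standard pydantic `BaseModel` as in the diff:

```python
from pydantic import BaseModel


class DeliveryFileTags(BaseModel):
    case_tags: list[set[str]] | None
    sample_tags: list[set[str]]


# Sample-level fastq delivery with no case-level files:
tags = DeliveryFileTags(case_tags=None, sample_tags=[{"fastq"}])
print(tags.sample_tags)  # [{'fastq'}]
```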
diff --git a/cg/services/deliver_files/tag_fetcher/sample_and_case_service.py b/cg/services/deliver_files/tag_fetcher/sample_and_case_service.py
index 14bc032266..fe822b9b2b 100644
--- a/cg/services/deliver_files/tag_fetcher/sample_and_case_service.py
+++ b/cg/services/deliver_files/tag_fetcher/sample_and_case_service.py
@@ -13,7 +13,10 @@ class SampleAndCaseDeliveryTagsFetcher(FetchDeliveryFileTagsService):
@handle_tag_errors
def fetch_tags(self, workflow: Workflow) -> DeliveryFileTags:
- """Get the case tags for the files that need to be delivered for a workflow."""
+ """Get the case tags for the files that need to be delivered for a workflow.
+ args:
+ workflow: The workflow to fetch tags for
+ """
self._validate_workflow(workflow)
return DeliveryFileTags(
case_tags=PIPELINE_ANALYSIS_TAG_MAP[workflow]["case_tags"],
diff --git a/cg/services/deliver_files/utils.py b/cg/services/deliver_files/utils.py
new file mode 100644
index 0000000000..69452ef988
--- /dev/null
+++ b/cg/services/deliver_files/utils.py
@@ -0,0 +1,123 @@
+import logging
+import os
+from pathlib import Path
+
+from cg.services.deliver_files.file_fetcher.models import SampleFile, CaseFile
+
+LOG = logging.getLogger(__name__)
+
+
+class FileManager:
+ """
+ Service to manage files.
+ Handles operations that create or rename files and directories.
+ """
+
+ @staticmethod
+ def create_directories(base_path: Path, directories: set[str]) -> None:
+ """Create directories for given names under the base path.
+ args:
+ base_path: The base path to create the directories under.
+            directories: The directories to create within the given base path. Can be a set with a single entry.
+ """
+
+ for directory in directories:
+ LOG.debug(f"[FileManager] Creating directory or file: {base_path}/{directory}")
+ Path(base_path, directory).mkdir(parents=True, exist_ok=True)
+
+ @staticmethod
+ def rename_file(src: Path, dst: Path) -> None:
+ """
+ Rename a file from src to dst.
+        Raise ValueError if src or dst is None and FileNotFoundError if src does not exist.
+ args:
+ src: The source file path.
+ dst: The destination file path.
+ """
+ if not src or not dst:
+ raise ValueError("Source and destination paths cannot be None.")
+ LOG.debug(f"[FileManager] Renaming file: {src} -> {dst}")
+ if not src.exists():
+ raise FileNotFoundError(f"Source file {src} does not exist.")
+ os.rename(src=src, dst=dst)
+
+ @staticmethod
+ def create_hard_link(src: Path, dst: Path) -> None:
+ """
+ Create a hard link from src to dst.
+ args:
+ src: The source file path.
+ dst: The destination file path.
+ """
+ LOG.debug(f"[FileManager] Creating hard link: {src} -> {dst}")
+ os.link(src=src, dst=dst)
+
+
+class FileMover:
+ """
+ Service class to move files.
+ Requires a file management service to perform file operations.
+ """
+
+ def __init__(self, file_manager):
+ """
+ args:
+ file_manager: Service for file operations (e.g., create directories, move files).
+ """
+ self.file_management_service = file_manager
+
+ def create_directories(self, base_path: Path, directories: set[str]) -> None:
+ """Create required directories.
+ args:
+ base_path: The base path to create the directories under.
+ directories: The directories to create.
+ """
+ self.file_management_service.create_directories(base_path, directories)
+
+    def move_files_to_directory(
+        self, file_models: list[CaseFile | SampleFile], target_dir: Path
+    ) -> None:
+ """Move files to the target directory.
+ args:
+ file_models: The file models that contain the files to move.
+ target_dir: The directory to move the files to.
+ """
+ for file_model in file_models:
+ target_path = Path(target_dir, file_model.file_path.name)
+ self._move_or_link_file(src=file_model.file_path, dst=target_path)
+
+ @staticmethod
+ def update_file_paths(
+ file_models: list[CaseFile | SampleFile], target_dir: Path
+ ) -> list[CaseFile | SampleFile]:
+ """Update file paths to point to the target directory.
+ args:
+ file_models: The file models to update.
+ target_dir: The target directory to point the file paths to.
+ """
+ for file_model in file_models:
+ file_model.file_path = Path(target_dir, file_model.file_path.name)
+ return file_models
+
+ def move_and_update_files(
+ self, file_models: list[CaseFile | SampleFile], target_dir: Path
+ ) -> list[CaseFile | SampleFile]:
+ """Move files to the target directory and update the file paths.
+ args:
+ file_models: The file models that contain the files to move.
+ target_dir: The directory to move the files to.
+ """
+ if file_models:
+ self.move_files_to_directory(file_models=file_models, target_dir=target_dir)
+ return self.update_file_paths(file_models=file_models, target_dir=target_dir)
+ return file_models
+
+ def _move_or_link_file(self, src: Path, dst: Path) -> None:
+ """Move or create a hard link for a file.
+ args:
+ src: The source file path
+ dst: The destination file path
+ """
+ LOG.debug(f"[FileMover] Moving file: {src} -> {dst}")
+ if dst.exists():
+ LOG.debug(f"Overwriting existing file: {dst}")
+ dst.unlink()
+ self.file_management_service.create_hard_link(src=src, dst=dst)
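
`FileManager` performs the raw filesystem calls while `FileMover` orchestrates them against the file models. A runnable sketch of the link-then-repoint flow, with a minimal stand-in dataclass in place of the real `SampleFile` model:

```python
import os
from dataclasses import dataclass
from pathlib import Path
from tempfile import TemporaryDirectory


@dataclass
class SampleFile:  # stand-in for cg.services.deliver_files.file_fetcher.models.SampleFile
    file_path: Path


class FileManager:
    @staticmethod
    def create_directories(base_path: Path, directories: set[str]) -> None:
        for directory in directories:
            Path(base_path, directory).mkdir(parents=True, exist_ok=True)

    @staticmethod
    def create_hard_link(src: Path, dst: Path) -> None:
        os.link(src=src, dst=dst)


with TemporaryDirectory() as tmp:
    base = Path(tmp)
    src_file = base / "sample_R1_001.fastq.gz"
    src_file.write_text("reads")
    FileManager.create_directories(base, {"cust000/inbox/12345"})
    target_dir = base / "cust000" / "inbox" / "12345"
    model = SampleFile(file_path=src_file)
    # Mirror FileMover._move_or_link_file: remove any existing target, hard link, repoint the model.
    dst = target_dir / model.file_path.name
    if dst.exists():
        dst.unlink()
    FileManager.create_hard_link(src=model.file_path, dst=dst)
    model.file_path = dst
    print(model.file_path.relative_to(base))  # cust000/inbox/12345/sample_R1_001.fastq.gz
```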
diff --git a/cg/services/delivery_message/delivery_message_service.py b/cg/services/delivery_message/delivery_message_service.py
index 3d00b22f39..e0682162ea 100644
--- a/cg/services/delivery_message/delivery_message_service.py
+++ b/cg/services/delivery_message/delivery_message_service.py
@@ -1,9 +1,7 @@
from cg.apps.tb import TrailblazerAPI
from cg.apps.tb.models import TrailblazerAnalysis
from cg.exc import OrderNotDeliverableError
-from cg.server.dto.delivery_message.delivery_message_request import (
- DeliveryMessageRequest,
-)
+from cg.server.dto.delivery_message.delivery_message_request import DeliveryMessageRequest
from cg.server.dto.delivery_message.delivery_message_response import (
DeliveryMessageOrderResponse,
DeliveryMessageResponse,
@@ -54,5 +52,5 @@ def _get_validated_analyses(
def _get_delivery_message(self, case_ids: set[str]) -> str:
cases: list[Case] = self.store.get_cases_by_internal_ids(case_ids)
validate_cases(cases=cases, case_ids=case_ids)
- message: str = get_message(cases)
+ message: str = get_message(cases=cases, store=self.store)
return message
diff --git a/cg/services/delivery_message/messages/__init__.py b/cg/services/delivery_message/messages/__init__.py
index 1c6794b367..88520919fe 100644
--- a/cg/services/delivery_message/messages/__init__.py
+++ b/cg/services/delivery_message/messages/__init__.py
@@ -1,11 +1,25 @@
-from cg.services.delivery_message.messages.analysis_scout_message import AnalysisScoutMessage
+from cg.services.delivery_message.messages.analysis_scout_message import (
+ AnalysisScoutMessage,
+)
from cg.services.delivery_message.messages.covid_message import CovidMessage
-from cg.services.delivery_message.messages.fastq_message import FastqMessage
-from cg.services.delivery_message.messages.fastq_scout_message import FastqScoutMessage
from cg.services.delivery_message.messages.fastq_analysis_scout_message import (
FastqAnalysisScoutMessage,
)
-from cg.services.delivery_message.messages.microsalt_mwr_message import MicrosaltMwrMessage
-from cg.services.delivery_message.messages.microsalt_mwx_message import MicrosaltMwxMessage
+from cg.services.delivery_message.messages.fastq_message import FastqMessage
+from cg.services.delivery_message.messages.fastq_scout_message import FastqScoutMessage
+from cg.services.delivery_message.messages.microsalt_mwr_message import (
+ MicrosaltMwrMessage,
+)
+from cg.services.delivery_message.messages.microsalt_mwx_message import (
+ MicrosaltMwxMessage,
+)
from cg.services.delivery_message.messages.scout_message import ScoutMessage
from cg.services.delivery_message.messages.statina_message import StatinaMessage
+from cg.services.delivery_message.messages.rna_delivery_message import (
+ RNAScoutStrategy,
+ RNAFastqStrategy,
+ RNAAnalysisStrategy,
+ RNAFastqAnalysisStrategy,
+ RNAUploadMessageStrategy,
+ RNADeliveryMessage,
+)
diff --git a/cg/services/delivery_message/messages/rna_delivery_message.py b/cg/services/delivery_message/messages/rna_delivery_message.py
new file mode 100644
index 0000000000..00b4bfc16d
--- /dev/null
+++ b/cg/services/delivery_message/messages/rna_delivery_message.py
@@ -0,0 +1,68 @@
+from abc import ABC, abstractmethod
+
+from cg.services.delivery_message.messages.utils import (
+ get_caesar_delivery_path,
+ get_scout_links_row_separated,
+)
+from cg.store.models import Case
+
+
+class RNAUploadMessageStrategy(ABC):
+ """Abstract base class for delivery message strategies."""
+
+ @abstractmethod
+ def get_file_upload_message(self, delivery_path: str) -> str:
+ """Generate the file upload message part."""
+ pass
+
+
+class RNAAnalysisStrategy(RNAUploadMessageStrategy):
+ def get_file_upload_message(self, delivery_path: str) -> str:
+ return (
+ f"The analysis files are currently being uploaded to your inbox on Caesar:\n\n"
+ f"{delivery_path}"
+ )
+
+
+class RNAFastqAnalysisStrategy(RNAUploadMessageStrategy):
+ def get_file_upload_message(self, delivery_path: str) -> str:
+ return (
+ f"The fastq and analysis files are currently being uploaded to your inbox on Caesar:\n\n"
+ f"{delivery_path}"
+ )
+
+
+class RNAFastqStrategy(RNAUploadMessageStrategy):
+ def get_file_upload_message(self, delivery_path: str) -> str:
+ return (
+ f"The fastq files are currently being uploaded to your inbox on Caesar:\n\n"
+ f"{delivery_path}"
+ )
+
+
+class RNAScoutStrategy(RNAUploadMessageStrategy):
+ def get_file_upload_message(self, delivery_path: str) -> str:
+ return "" # No file upload message needed for this case.
+
+
+class RNADeliveryMessage:
+ def __init__(self, store, strategy: RNAUploadMessageStrategy):
+ self.store = store
+ self.strategy = strategy
+
+ def create_message(self, cases: list[Case]) -> str:
+ message = "Hello,\n\n"
+ for case in cases:
+ scout_message = self._get_scout_message_for_case(case=case)
+ message += scout_message
+ delivery_path = get_caesar_delivery_path(cases[0])
+ file_upload_message = self.strategy.get_file_upload_message(delivery_path)
+ return message + file_upload_message
+
+ def _get_scout_message_for_case(self, case: Case) -> str:
+ related_uploaded_dna_cases = self.store.get_uploaded_related_dna_cases(rna_case=case)
+ scout_links = get_scout_links_row_separated(cases=related_uploaded_dna_cases)
+ return (
+ f"The analysis for case {case.name} has been uploaded to the corresponding DNA case(s) on Scout at:\n\n"
+ f"{scout_links}\n\n"
+ )
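
The strategies differ only in the final upload sentence, so swapping one in changes a single paragraph of the message. A self-contained sketch of how `RNADeliveryMessage` composes its output, with a stub store and plain strings standing in for the real `Store`, `Case`, and path helpers:

```python
class StubStore:
    """Stand-in for Store; the real method queries StatusDB for uploaded related DNA cases."""

    def get_uploaded_related_dna_cases(self, rna_case: str) -> list[str]:
        return ["https://scout.scilifelab.se/cust000/dna-case-1"]


class RNAFastqStrategy:
    def get_file_upload_message(self, delivery_path: str) -> str:
        return f"The fastq files are currently being uploaded to your inbox on Caesar:\n\n{delivery_path}"


class RNADeliveryMessage:
    def __init__(self, store, strategy):
        self.store = store
        self.strategy = strategy

    def create_message(self, cases: list[str]) -> str:
        message = "Hello,\n\n"
        for case in cases:
            dna_links = "\n".join(self.store.get_uploaded_related_dna_cases(rna_case=case))
            message += (
                f"The analysis for case {case} has been uploaded to the "
                f"corresponding DNA case(s) on Scout at:\n\n{dna_links}\n\n"
            )
        delivery_path = "/home/cust000/inbox/123456"  # made-up Caesar inbox path
        return message + self.strategy.get_file_upload_message(delivery_path)


print(RNADeliveryMessage(store=StubStore(), strategy=RNAFastqStrategy()).create_message(["rna-case-1"]))
```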
diff --git a/cg/services/delivery_message/messages/utils.py b/cg/services/delivery_message/messages/utils.py
index 09b47aed98..20ac60035d 100644
--- a/cg/services/delivery_message/messages/utils.py
+++ b/cg/services/delivery_message/messages/utils.py
@@ -13,6 +13,11 @@ def get_scout_link(case: Case) -> str:
return f"https://scout.scilifelab.se/{customer_id}/{case_name}"
+def get_scout_links_row_separated(cases: list[Case]) -> str:
+ scout_links: list[str] = [get_scout_link(case) for case in cases]
+ return "\n".join(scout_links)
+
+
def get_pangolin_delivery_path(case: Case) -> str:
customer_id: str = case.customer.internal_id
return f"/home/{customer_id}/inbox/wwLab_automatisk_hamtning"
diff --git a/cg/services/delivery_message/utils.py b/cg/services/delivery_message/utils.py
index 04ad891379..f3cfa87c41 100644
--- a/cg/services/delivery_message/utils.py
+++ b/cg/services/delivery_message/utils.py
@@ -1,9 +1,5 @@
from cg.constants.constants import DataDelivery, MicrosaltAppTags, Workflow
-from cg.exc import (
- CaseNotFoundError,
- DeliveryMessageNotSupportedError,
- OrderMismatchError,
-)
+from cg.exc import CaseNotFoundError, OrderMismatchError
from cg.services.delivery_message.messages import (
AnalysisScoutMessage,
CovidMessage,
@@ -17,47 +13,76 @@
from cg.services.delivery_message.messages.analysis_message import AnalysisMessage
from cg.services.delivery_message.messages.bam_message import BamMessage
from cg.services.delivery_message.messages.delivery_message import DeliveryMessage
-from cg.services.delivery_message.messages.fastq_analysis_message import (
- FastqAnalysisMessage,
-)
-from cg.services.delivery_message.messages.microsalt_mwx_message import (
- MicrosaltMwxMessage,
+from cg.services.delivery_message.messages.fastq_analysis_message import FastqAnalysisMessage
+from cg.services.delivery_message.messages.microsalt_mwx_message import MicrosaltMwxMessage
+from cg.services.delivery_message.messages.rna_delivery_message import (
+ RNAScoutStrategy,
+ RNAFastqStrategy,
+ RNAAnalysisStrategy,
+ RNAFastqAnalysisStrategy,
+ RNAUploadMessageStrategy,
+ RNADeliveryMessage,
)
from cg.store.models import Case, Sample
+from cg.store.store import Store
+
+MESSAGE_MAP = {
+ DataDelivery.ANALYSIS_FILES: AnalysisMessage,
+ DataDelivery.FASTQ: FastqMessage,
+ DataDelivery.SCOUT: ScoutMessage,
+ DataDelivery.FASTQ_SCOUT: FastqScoutMessage,
+ DataDelivery.FASTQ_ANALYSIS: FastqAnalysisMessage,
+ DataDelivery.ANALYSIS_SCOUT: AnalysisScoutMessage,
+ DataDelivery.FASTQ_ANALYSIS_SCOUT: FastqAnalysisScoutMessage,
+ DataDelivery.STATINA: StatinaMessage,
+ DataDelivery.BAM: BamMessage,
+}
-def get_message(cases: list[Case]) -> str:
- message_strategy: DeliveryMessage = get_message_strategy(cases[0])
+RNA_STRATEGY_MAP: dict[DataDelivery, type[RNAUploadMessageStrategy]] = {
+    # Maps only scout-type deliveries; other delivery types fall back to MESSAGE_MAP.
+ DataDelivery.SCOUT: RNAScoutStrategy,
+ DataDelivery.FASTQ_SCOUT: RNAFastqStrategy,
+ DataDelivery.ANALYSIS_SCOUT: RNAAnalysisStrategy,
+ DataDelivery.FASTQ_ANALYSIS_SCOUT: RNAFastqAnalysisStrategy,
+}
+
+
+def get_message(cases: list[Case], store: Store) -> str:
+ message_strategy: DeliveryMessage = get_message_strategy(cases[0], store)
return message_strategy.create_message(cases)
-def get_message_strategy(case: Case) -> DeliveryMessage:
+def get_message_strategy(case: Case, store: Store) -> DeliveryMessage:
if case.data_analysis == Workflow.MICROSALT:
return get_microsalt_message_strategy(case)
if case.data_analysis == Workflow.MUTANT:
return CovidMessage()
+ if case.data_analysis == Workflow.MIP_RNA:
+ return get_rna_message_strategy_from_data_delivery(case=case, store=store)
+
message_strategy: DeliveryMessage = get_message_strategy_from_data_delivery(case)
return message_strategy
def get_message_strategy_from_data_delivery(case: Case) -> DeliveryMessage:
- message_strategy: DeliveryMessage = message_map[case.data_delivery]()
+ message_strategy: DeliveryMessage = MESSAGE_MAP[case.data_delivery]()
return message_strategy
-message_map = {
- DataDelivery.ANALYSIS_FILES: AnalysisMessage,
- DataDelivery.FASTQ: FastqMessage,
- DataDelivery.SCOUT: ScoutMessage,
- DataDelivery.FASTQ_SCOUT: FastqScoutMessage,
- DataDelivery.FASTQ_ANALYSIS: FastqAnalysisMessage,
- DataDelivery.ANALYSIS_SCOUT: AnalysisScoutMessage,
- DataDelivery.FASTQ_ANALYSIS_SCOUT: FastqAnalysisScoutMessage,
- DataDelivery.STATINA: StatinaMessage,
- DataDelivery.BAM: BamMessage,
-}
+def get_rna_message_strategy_from_data_delivery(
+ case: Case, store: Store
+) -> DeliveryMessage | RNADeliveryMessage:
+ """Get the RNA delivery message strategy based on the data delivery type.
+ If a scout delivery is required it will use the RNADeliveryMessage class that links RNA to DNA cases.
+    Otherwise it uses the conventional delivery message strategy.
+    """
+    message_strategy = RNA_STRATEGY_MAP.get(case.data_delivery)
+    if message_strategy:
+ return RNADeliveryMessage(store=store, strategy=message_strategy())
+ return MESSAGE_MAP[case.data_delivery]()
def get_microsalt_message_strategy(case: Case) -> DeliveryMessage:
@@ -100,11 +125,6 @@ def validate_cases(cases: list[Case], case_ids: list[str]) -> None:
raise CaseNotFoundError("Internal id not found in the database")
if not is_matching_order(cases):
raise OrderMismatchError("Cases do not belong to the same order")
- cases_with_mip_rna: list[Case] = [
- case for case in cases if case.data_analysis == Workflow.MIP_RNA
- ]
- if cases_with_mip_rna:
- raise DeliveryMessageNotSupportedError("Workflow is not supported.")
def is_matching_order(cases: list[Case]) -> bool:
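
Because `RNA_STRATEGY_MAP` only covers scout-type deliveries, the lookup goes through `dict.get` so that other delivery types fall back to `MESSAGE_MAP`. A condensed sketch of the dispatch, with strings standing in for the message classes:

```python
from enum import StrEnum


class DataDelivery(StrEnum):  # stand-in for cg.constants.constants.DataDelivery
    SCOUT = "scout"
    FASTQ = "fastq"


RNA_STRATEGY_MAP = {DataDelivery.SCOUT: "RNAScoutStrategy"}  # scout-type deliveries only
MESSAGE_MAP = {DataDelivery.FASTQ: "FastqMessage", DataDelivery.SCOUT: "ScoutMessage"}


def get_rna_message_strategy(data_delivery: DataDelivery) -> str:
    # dict.get returns None on a miss, so non-scout deliveries fall through to MESSAGE_MAP.
    strategy = RNA_STRATEGY_MAP.get(data_delivery)
    if strategy:
        return f"RNADeliveryMessage({strategy})"
    return MESSAGE_MAP[data_delivery]


print(get_rna_message_strategy(DataDelivery.SCOUT))  # RNADeliveryMessage(RNAScoutStrategy)
print(get_rna_message_strategy(DataDelivery.FASTQ))  # FastqMessage
```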
diff --git a/cg/services/fastq_concatenation_service/fastq_concatenation_service.py b/cg/services/fastq_concatenation_service/fastq_concatenation_service.py
index c36673cee6..4aaec3cf02 100644
--- a/cg/services/fastq_concatenation_service/fastq_concatenation_service.py
+++ b/cg/services/fastq_concatenation_service/fastq_concatenation_service.py
@@ -1,28 +1,48 @@
import logging
from pathlib import Path
+from cg.constants.constants import ReadDirection
from cg.services.fastq_concatenation_service.utils import (
- concatenate_forward_reads,
- concatenate_reverse_reads,
remove_raw_fastqs,
+ concatenate_fastq_reads_for_direction,
)
LOG = logging.getLogger(__name__)
class FastqConcatenationService:
+ """Fastq file concatenation service."""
+
+ @staticmethod
def concatenate(
- self,
+ sample_id: str,
fastq_directory: Path,
forward_output_path: Path,
reverse_output_path: Path,
remove_raw: bool = False,
):
- temp_forward: Path | None = concatenate_forward_reads(fastq_directory)
- temp_reverse: Path | None = concatenate_reverse_reads(fastq_directory)
+ """Concatenate fastq files for a given sample in a directory and write the concatenated files to the output path.
+
+ Args:
+            sample_id: The identifier used to find the sample's files; it should be unique within the file name.
+ fastq_directory: The directory containing the fastq files.
+ forward_output_path: The path where the concatenated forward reads will be written.
+ reverse_output_path: The path where the concatenated reverse reads will be written.
+ remove_raw: If True, remove the raw fastq files after concatenation.
+ """
+ LOG.debug(
+ f"[Concatenation Service] Concatenating fastq files for {sample_id} in {fastq_directory}"
+ )
+ temp_forward: Path | None = concatenate_fastq_reads_for_direction(
+ directory=fastq_directory, sample_id=sample_id, direction=ReadDirection.FORWARD
+ )
+ temp_reverse: Path | None = concatenate_fastq_reads_for_direction(
+ directory=fastq_directory, sample_id=sample_id, direction=ReadDirection.REVERSE
+ )
if remove_raw:
remove_raw_fastqs(
+ sample_id=sample_id,
fastq_directory=fastq_directory,
forward_file=temp_forward,
reverse_file=temp_reverse,
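
With `concatenate` now a staticmethod keyed on `sample_id`, callers concatenate per sample inside a shared directory. A hypothetical call against the new signature; the paths are made up and the cg package is assumed to be installed:

```python
from pathlib import Path

from cg.services.fastq_concatenation_service.fastq_concatenation_service import (
    FastqConcatenationService,
)

fastq_dir = Path("/deliveries/inbox/12345/sample_1")  # hypothetical delivery directory
FastqConcatenationService.concatenate(
    sample_id="ACC12345A1",
    fastq_directory=fastq_dir,
    forward_output_path=fastq_dir / "sample_1_1.fastq.gz",
    reverse_output_path=fastq_dir / "sample_1_2.fastq.gz",
    remove_raw=True,  # delete the per-lane fastq files once concatenated
)
```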
diff --git a/cg/services/fastq_concatenation_service/utils.py b/cg/services/fastq_concatenation_service/utils.py
index b3196cd7cf..bfeb12c39e 100644
--- a/cg/services/fastq_concatenation_service/utils.py
+++ b/cg/services/fastq_concatenation_service/utils.py
@@ -8,8 +8,12 @@
from cg.constants import FileExtensions
-def concatenate_forward_reads(directory: Path) -> Path | None:
- fastqs: list[Path] = get_forward_read_fastqs(directory)
+def concatenate_fastq_reads_for_direction(
+ directory: Path, sample_id: str, direction: ReadDirection
+) -> Path | None:
+ fastqs: list[Path] = get_fastqs_by_direction(
+ fastq_directory=directory, direction=direction, sample_id=sample_id
+ )
if not fastqs:
return
output_file: Path = get_new_unique_file(directory)
@@ -18,31 +22,19 @@ def concatenate_forward_reads(directory: Path) -> Path | None:
return output_file
-def concatenate_reverse_reads(directory: Path) -> Path | None:
- fastqs: list[Path] = get_reverse_read_fastqs(directory)
- if not fastqs:
- return
- file: Path = get_new_unique_file(directory)
- concatenate(input_files=fastqs, output_file=file)
- validate_concatenation(input_files=fastqs, output_file=file)
- return file
-
-
def get_new_unique_file(directory: Path) -> Path:
unique_id = uuid.uuid4()
return Path(directory, f"{unique_id}{FileExtensions.FASTQ}{FileExtensions.GZIP}")
-def get_forward_read_fastqs(fastq_directory: Path) -> list[Path]:
- return get_fastqs_by_direction(fastq_directory=fastq_directory, direction=ReadDirection.FORWARD)
-
-
-def get_reverse_read_fastqs(fastq_directory: Path) -> list[Path]:
- return get_fastqs_by_direction(fastq_directory=fastq_directory, direction=ReadDirection.REVERSE)
-
-
-def get_fastqs_by_direction(fastq_directory: Path, direction: int) -> list[Path]:
- pattern = f".+_R{direction}_[0-9]+{FileExtensions.FASTQ}{FileExtensions.GZIP}"
+def get_fastqs_by_direction(fastq_directory: Path, direction: int, sample_id: str) -> list[Path]:
+ """Get fastq files by direction and sample id in a given directory.
+ args:
+        fastq_directory: The directory containing the fastq files.
+        direction: The direction of the reads.
+        sample_id: The identifier used to find the sample's files; it should be unique within the file name.
+ """
+ pattern = f".*{sample_id}.*_R{direction}_[0-9]+{FileExtensions.FASTQ}{FileExtensions.GZIP}"
fastqs: list[Path] = []
for file in fastq_directory.iterdir():
if re.match(pattern, file.name):
@@ -72,17 +64,30 @@ def sort_files_by_name(files: list[Path]) -> list[Path]:
return sorted(files, key=lambda file: file.name)
-def file_can_be_removed(file: Path, forward_file: Path, reverse_file: Path) -> bool:
+def file_can_be_removed(file: Path, forward_file: Path, reverse_file: Path, sample_id: str) -> bool:
+ """
+ Check if a file can be removed.
+ args:
+        file: The file to check.
+        forward_file: The concatenated forward-read file (never removed).
+        reverse_file: The concatenated reverse-read file (never removed).
+        sample_id: The identifier used to find the sample's files; it should be unique within the file name.
+ """
return (
f"{FileFormat.FASTQ}{FileExtensions.GZIP}" in file.name
+ and sample_id in file.name
and file != forward_file
and file != reverse_file
)
-def remove_raw_fastqs(fastq_directory: Path, forward_file: Path, reverse_file: Path) -> None:
+def remove_raw_fastqs(
+ fastq_directory: Path, forward_file: Path, reverse_file: Path, sample_id: str
+) -> None:
for file in fastq_directory.iterdir():
- if file_can_be_removed(file=file, forward_file=forward_file, reverse_file=reverse_file):
+ if file_can_be_removed(
+ file=file, forward_file=forward_file, reverse_file=reverse_file, sample_id=sample_id
+ ):
file.unlink()
@@ -92,23 +97,3 @@ def generate_concatenated_fastq_delivery_path(
return Path(
fastq_directory, f"{sample_name}_{direction}{FileExtensions.FASTQ}{FileExtensions.GZIP}"
)
-
-
-def generate_forward_concatenated_fastq_delivery_path(
- fastq_directory: Path, sample_name: str
-) -> Path:
- return generate_concatenated_fastq_delivery_path(
- fastq_directory=fastq_directory,
- sample_name=sample_name,
- direction=ReadDirection.FORWARD,
- )
-
-
-def generate_reverse_concatenated_fastq_delivery_path(
- fastq_directory: Path, sample_name: str
-) -> Path:
- return generate_concatenated_fastq_delivery_path(
- fastq_directory=fastq_directory,
- sample_name=sample_name,
- direction=ReadDirection.REVERSE,
- )
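
The new pattern anchors on both the sample id and the read direction, which is what keeps one sample's fastqs out of another sample's concatenation. A quick check of which file names match, with the `.fastq.gz` suffix written out in place of the `FileExtensions` constants:

```python
import re

sample_id = "ACC12345A1"
direction = 1  # ReadDirection.FORWARD
pattern = rf".*{sample_id}.*_R{direction}_[0-9]+\.fastq\.gz"

names = [
    "ACC12345A1_S1_L001_R1_001.fastq.gz",  # matches: right sample, forward read
    "ACC12345A1_S1_L001_R2_001.fastq.gz",  # no match: reverse read
    "ACC99999B2_S2_L001_R1_001.fastq.gz",  # no match: different sample
]
for name in names:
    print(name, bool(re.match(pattern, name)))
```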
diff --git a/cg/services/orders/constants.py b/cg/services/orders/constants.py
new file mode 100644
index 0000000000..1023e4b122
--- /dev/null
+++ b/cg/services/orders/constants.py
@@ -0,0 +1,20 @@
+from cg.constants import Workflow
+from cg.models.orders.constants import OrderType
+
+ORDER_TYPE_WORKFLOW_MAP: dict[OrderType, Workflow] = {
+ OrderType.BALSAMIC: Workflow.BALSAMIC,
+ OrderType.BALSAMIC_UMI: Workflow.BALSAMIC_UMI,
+ OrderType.FASTQ: Workflow.RAW_DATA,
+ OrderType.FLUFFY: Workflow.FLUFFY,
+ OrderType.METAGENOME: Workflow.RAW_DATA,
+ OrderType.MICROBIAL_FASTQ: Workflow.RAW_DATA,
+ OrderType.MICROSALT: Workflow.MICROSALT,
+ OrderType.MIP_DNA: Workflow.MIP_DNA,
+ OrderType.MIP_RNA: Workflow.MIP_RNA,
+ OrderType.PACBIO_LONG_READ: Workflow.RAW_DATA,
+ OrderType.RML: Workflow.RAW_DATA,
+ OrderType.RNAFUSION: Workflow.RNAFUSION,
+ OrderType.SARS_COV_2: Workflow.MUTANT,
+ OrderType.TAXPROFILER: Workflow.TAXPROFILER,
+ OrderType.TOMTE: Workflow.TOMTE,
+}
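
Several order types collapse onto `Workflow.RAW_DATA`, so the map is many-to-one. A lookup sketch, assuming this branch of cg is importable:

```python
from cg.constants import Workflow
from cg.models.orders.constants import OrderType
from cg.services.orders.constants import ORDER_TYPE_WORKFLOW_MAP

assert ORDER_TYPE_WORKFLOW_MAP[OrderType.FASTQ] is Workflow.RAW_DATA
assert ORDER_TYPE_WORKFLOW_MAP[OrderType.RML] is Workflow.RAW_DATA
assert ORDER_TYPE_WORKFLOW_MAP[OrderType.SARS_COV_2] is Workflow.MUTANT
```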
diff --git a/cg/services/orders/lims_service/service.py b/cg/services/orders/lims_service/service.py
new file mode 100644
index 0000000000..28d5c255e5
--- /dev/null
+++ b/cg/services/orders/lims_service/service.py
@@ -0,0 +1,65 @@
+import logging
+
+from cg.apps.lims import LimsAPI
+from cg.constants import DataDelivery, Workflow
+from cg.models.lims.sample import LimsSample
+from cg.services.orders.validation.models.sample import Sample
+
+LOG = logging.getLogger(__name__)
+
+
+class OrderLimsService:
+
+ def __init__(self, lims_api: LimsAPI):
+ self.lims_api = lims_api
+
+ @staticmethod
+ def _build_lims_sample(
+ customer: str,
+ samples: list[Sample],
+ workflow: Workflow,
+ delivery_type: DataDelivery,
+ skip_reception_control: bool,
+ ) -> list[LimsSample]:
+ """Convert order input to LIMS interface input."""
+ samples_lims = []
+ for sample in samples:
+ dict_sample = sample.model_dump()
+ LOG.debug(f"{sample.name}: prepare LIMS input")
+ dict_sample["customer"] = customer
+ dict_sample["data_analysis"] = workflow
+ dict_sample["data_delivery"] = delivery_type
+ dict_sample["family_name"] = sample._case_name
+ if skip_reception_control:
+ dict_sample["skip_reception_control"] = True
+ lims_sample: LimsSample = LimsSample.parse_obj(dict_sample)
+ samples_lims.append(lims_sample)
+ return samples_lims
+
+ def process_lims(
+ self,
+ samples: list[Sample],
+ customer: str,
+ ticket: int | None,
+ order_name: str,
+ workflow: Workflow,
+ delivery_type: DataDelivery,
+ skip_reception_control: bool,
+    ) -> tuple[dict, dict]:
+ """Process samples to add them to LIMS."""
+ samples_lims: list[LimsSample] = self._build_lims_sample(
+ customer=customer,
+ samples=samples,
+ workflow=workflow,
+ delivery_type=delivery_type,
+ skip_reception_control=skip_reception_control,
+ )
+        project_name: str = str(ticket) if ticket else order_name
+ # Create new lims project
+ project_data = self.lims_api.submit_project(
+ project_name, [lims_sample.dict() for lims_sample in samples_lims]
+ )
+ lims_map: dict[str, str] = self.lims_api.get_samples(
+ projectlimsid=project_data["id"], map_ids=True
+ )
+ return project_data, lims_map
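
A subtlety in `process_lims` is the project-name fallback: `str(None)` is the truthy string `"None"`, so the ticket must be tested before it is stringified. A minimal demonstration of the guard:

```python
def project_name(ticket: int | None, order_name: str) -> str:
    # str(ticket) or order_name would never fall back: str(None) == "None" is truthy.
    return str(ticket) if ticket else order_name


assert project_name(123456, "order-a") == "123456"
assert project_name(None, "order-a") == "order-a"
```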
diff --git a/cg/services/orders/order_lims_service/order_lims_service.py b/cg/services/orders/order_lims_service/order_lims_service.py
deleted file mode 100644
index 03d9210400..0000000000
--- a/cg/services/orders/order_lims_service/order_lims_service.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import logging
-
-from cg.apps.lims import LimsAPI
-from cg.models.lims.sample import LimsSample
-from cg.models.orders.order import OrderIn
-from cg.models.orders.samples import OrderInSample
-
-LOG = logging.getLogger(__name__)
-
-
-class OrderLimsService:
-
- def __init__(self, lims_api: LimsAPI):
- self.lims_api = lims_api
-
- @staticmethod
- def _build_lims_sample(customer: str, samples: list[OrderInSample]) -> list[LimsSample]:
- """Convert order input to lims interface input."""
- samples_lims = []
- for sample in samples:
- dict_sample = sample.__dict__
- LOG.debug(f"{sample.name}: prepare LIMS input")
- dict_sample["customer"] = customer
- lims_sample: LimsSample = LimsSample.parse_obj(dict_sample)
- samples_lims.append(lims_sample)
- return samples_lims
-
- def process_lims(self, lims_order: OrderIn, new_samples: list[OrderInSample]):
- """Process samples to add them to LIMS."""
- samples_lims: list[LimsSample] = self._build_lims_sample(
- lims_order.customer, samples=new_samples
- )
- project_name: str = lims_order.ticket or lims_order.name
- # Create new lims project
- project_data = self.lims_api.submit_project(
- project_name, [lims_sample.dict() for lims_sample in samples_lims]
- )
- lims_map = self.lims_api.get_samples(projectlimsid=project_data["id"], map_ids=True)
- return project_data, lims_map
diff --git a/cg/services/orders/order_service/order_service.py b/cg/services/orders/order_service/order_service.py
index 11d5ee9fa1..d3c7a32921 100644
--- a/cg/services/orders/order_service/order_service.py
+++ b/cg/services/orders/order_service/order_service.py
@@ -1,13 +1,10 @@
from cg.server.dto.orders.orders_request import OrdersRequest
-from cg.server.dto.orders.orders_response import Order as OrderResponse, Order
-from cg.server.dto.orders.orders_response import OrdersResponse
+from cg.server.dto.orders.orders_response import Order, OrdersResponse
from cg.services.orders.order_service.models import OrderQueryParams
from cg.services.orders.order_summary_service.dto.order_summary import OrderSummary
-from cg.services.orders.order_summary_service.order_summary_service import (
- OrderSummaryService,
-)
+from cg.services.orders.order_summary_service.order_summary_service import OrderSummaryService
+from cg.store.models import Order as DbOrder
from cg.store.store import Store
-from cg.store.models import Order as DatabaseOrder
class OrderService:
@@ -15,8 +12,8 @@ def __init__(self, store: Store, status_service: OrderSummaryService) -> None:
self.store = store
self.summary_service = status_service
- def get_order(self, order_id: int) -> OrderResponse:
- order: Order = self.store.get_order_by_id(order_id)
+ def get_order(self, order_id: int) -> Order:
+ order: DbOrder = self.store.get_order_by_id(order_id)
summary: OrderSummary = self.summary_service.get_summary(order_id)
return self._create_order_response(order=order, summary=summary)
@@ -29,13 +26,13 @@ def get_orders(self, orders_request: OrdersRequest) -> OrdersResponse:
summaries: list[OrderSummary] = self.summary_service.get_summaries(order_ids)
return self._create_orders_response(orders=orders, summaries=summaries, total=total_count)
- def set_open(self, order_id: int, open: bool) -> OrderResponse:
- order: Order = self.store.update_order_status(order_id=order_id, open=open)
+ def set_open(self, order_id: int, open: bool) -> Order:
+ order: DbOrder = self.store.update_order_status(order_id=order_id, open=open)
return self._create_order_response(order)
def update_is_open(self, order_id: int, delivered_analyses: int) -> None:
"""Update the is_open parameter of an order based on the number of delivered analyses."""
- order: Order = self.store.get_order_by_id(order_id)
+ order: DbOrder = self.store.get_order_by_id(order_id)
case_count: int = len(order.cases)
if self._is_order_closed(case_count=case_count, delivered_analyses=delivered_analyses):
self.set_open(order_id=order_id, open=False)
@@ -55,7 +52,7 @@ def _get_order_query_params(orders_request: OrdersRequest) -> OrderQueryParams:
)
@staticmethod
- def _create_order_response(order: DatabaseOrder, summary: OrderSummary | None = None) -> Order:
+ def _create_order_response(order: DbOrder, summary: OrderSummary | None = None) -> Order:
return Order(
customer_id=order.customer.internal_id,
ticket_id=order.ticket_id,
@@ -67,7 +64,7 @@ def _create_order_response(order: DatabaseOrder, summary: OrderSummary | None =
)
def _create_orders_response(
- self, orders: list[DatabaseOrder], summaries: list[OrderSummary], total: int
+ self, orders: list[DbOrder], summaries: list[OrderSummary], total: int
) -> OrdersResponse:
orders: list[Order] = [self._create_order_response(order) for order in orders]
self._add_summaries(orders=orders, summaries=summaries)
diff --git a/cg/services/orders/store_order_services/store_case_order.py b/cg/services/orders/store_order_services/store_case_order.py
deleted file mode 100644
index 41ff3041e0..0000000000
--- a/cg/services/orders/store_order_services/store_case_order.py
+++ /dev/null
@@ -1,305 +0,0 @@
-import logging
-from datetime import datetime
-
-from cg.constants import Priority, Workflow
-from cg.constants.constants import CaseActions, DataDelivery
-from cg.constants.pedigree import Pedigree
-from cg.models.orders.order import OrderIn
-from cg.models.orders.samples import Of1508Sample
-from cg.services.orders.order_lims_service.order_lims_service import OrderLimsService
-from cg.services.orders.submitters.order_submitter import StoreOrderService
-from cg.store.models import (
- ApplicationVersion,
- Case,
- CaseSample,
- Customer,
- Order,
- Sample,
-)
-from cg.store.store import Store
-
-LOG = logging.getLogger(__name__)
-
-
-class StoreCaseOrderService(StoreOrderService):
- """
- Service for storing generic orders in StatusDB and Lims.
- This class is used to store orders for the following workflows:
- - Balsamic
- - Balsamic QC
- - Balsamic UMI
- - MIP DNA
- - MIP RNA
- - Tomte
- """
-
- def __init__(
- self,
- status_db: Store,
- lims_service: OrderLimsService,
- ):
- self.status_db = status_db
- self.lims = lims_service
-
- def store_order(self, order: OrderIn) -> dict:
- """Submit a batch of samples for sequencing and analysis."""
- return self._process_case_samples(order=order)
-
- def _process_case_samples(self, order: OrderIn) -> dict:
- """Process samples to be analyzed."""
- project_data = lims_map = None
-
- # submit new samples to lims
- new_samples = [sample for sample in order.samples if sample.internal_id is None]
- if new_samples:
- project_data, lims_map = self.lims.process_lims(
- lims_order=order, new_samples=new_samples
- )
-
- status_data = self.order_to_status(order=order)
- samples = [sample for family in status_data["families"] for sample in family["samples"]]
- if lims_map:
- self._fill_in_sample_ids(samples=samples, lims_map=lims_map)
-
- new_cases: list[Case] = self.store_items_in_status(
- customer_id=status_data["customer"],
- order=status_data["order"],
- ordered=project_data["date"] if project_data else datetime.now(),
- ticket_id=order.ticket,
- items=status_data["families"],
- )
- return {"project": project_data, "records": new_cases}
-
- @staticmethod
- def _group_cases(samples: list[Of1508Sample]) -> dict:
- """Group samples in cases."""
- cases = {}
- for sample in samples:
- case_id = sample.family_name
- if case_id not in cases:
- cases[case_id] = []
- cases[case_id].append(sample)
- return cases
-
- @staticmethod
- def _get_single_value(case_name, case_samples, value_key, value_default=None):
- values = set(getattr(sample, value_key) or value_default for sample in case_samples)
- if len(values) > 1:
- raise ValueError(f"different sample {value_key} values: {case_name} - {values}")
- single_value = values.pop()
- return single_value
-
- def order_to_status(self, order: OrderIn) -> dict:
- """Converts order input to status interface input for MIP-DNA, MIP-RNA and Balsamic."""
- status_data = {"customer": order.customer, "order": order.name, "families": []}
- cases = self._group_cases(order.samples)
-
- for case_name, case_samples in cases.items():
- case_internal_id: str = self._get_single_value(
- case_name, case_samples, "case_internal_id"
- )
- cohorts: set[str] = {
- cohort for sample in case_samples for cohort in sample.cohorts if cohort
- }
- data_analysis = self._get_single_value(case_name, case_samples, "data_analysis")
- data_delivery = self._get_single_value(case_name, case_samples, "data_delivery")
-
- panels: set[str] = set()
- if data_analysis in [Workflow.MIP_DNA, Workflow.TOMTE]:
- panels: set[str] = {
- panel for sample in case_samples for panel in sample.panels if panel
- }
-
- priority = self._get_single_value(
- case_name, case_samples, "priority", Priority.standard.name
- )
- synopsis: str = self._get_single_value(case_name, case_samples, "synopsis")
-
- case = {
- "cohorts": list(cohorts),
- "data_analysis": data_analysis,
- "data_delivery": data_delivery,
- "internal_id": case_internal_id,
- "name": case_name,
- "panels": list(panels),
- "priority": priority,
- "samples": [
- {
- "age_at_sampling": sample.age_at_sampling,
- "application": sample.application,
- "capture_kit": sample.capture_kit,
- "comment": sample.comment,
- "control": sample.control,
- "father": sample.father,
- "internal_id": sample.internal_id,
- "mother": sample.mother,
- "name": sample.name,
- "phenotype_groups": list(sample.phenotype_groups),
- "phenotype_terms": list(sample.phenotype_terms),
- "reference_genome": (
- sample.reference_genome if hasattr(sample, "reference_genome") else None
- ),
- "sex": sample.sex,
- "status": sample.status if hasattr(sample, "status") else None,
- "subject_id": sample.subject_id,
- "tumour": sample.tumour,
- }
- for sample in case_samples
- ],
- "synopsis": synopsis,
- }
-
- status_data["families"].append(case)
- return status_data
-
- def store_items_in_status(
- self, customer_id: str, order: str, ordered: datetime, ticket_id: str, items: list[dict]
- ) -> list[Case]:
- """Store cases, samples and their relationship in the Status database."""
- customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
- new_cases: list[Case] = []
- status_db_order = Order(
- customer=customer,
- order_date=datetime.now(),
- ticket_id=int(ticket_id),
- )
- for case in items:
- status_db_case: Case = self.status_db.get_case_by_internal_id(
- internal_id=case["internal_id"]
- )
- if not status_db_case:
- new_case: Case = self._create_case(
- case=case, customer_obj=customer, ticket=ticket_id
- )
- new_cases.append(new_case)
- self._update_case_panel(panels=case["panels"], case=new_case)
- status_db_case: Case = new_case
- else:
- self._append_ticket(ticket_id=ticket_id, case=status_db_case)
- self._update_action(action=CaseActions.ANALYZE, case=status_db_case)
- self._update_case_panel(panels=case["panels"], case=status_db_case)
- case_samples: dict[str, Sample] = {}
- status_db_order.cases.append(status_db_case)
- for sample in case["samples"]:
- existing_sample: Sample = self.status_db.get_sample_by_internal_id(
- internal_id=sample["internal_id"]
- )
- if not existing_sample:
- new_sample: Sample = self._create_sample(
- case=case,
- customer_obj=customer,
- order=order,
- ordered=ordered,
- sample=sample,
- ticket=ticket_id,
- )
- case_samples[sample["name"]] = new_sample
- else:
- case_samples[sample["name"]] = existing_sample
-
- for sample in case["samples"]:
- sample_mother: Sample = case_samples.get(sample.get(Pedigree.MOTHER))
- sample_father: Sample = case_samples.get(sample.get(Pedigree.FATHER))
- with self.status_db.session.no_autoflush:
- case_sample: CaseSample = self.status_db.get_case_sample_link(
- case_internal_id=status_db_case.internal_id,
- sample_internal_id=sample["internal_id"],
- )
- if not case_sample:
- case_sample: CaseSample = self._create_link(
- case_obj=status_db_case,
- family_samples=case_samples,
- father_obj=sample_father,
- mother_obj=sample_mother,
- sample=sample,
- )
-
- self._update_relationship(
- father_obj=sample_father,
- link_obj=case_sample,
- mother_obj=sample_mother,
- sample=sample,
- )
- self.status_db.session.add_all(new_cases)
- self.status_db.session.add(status_db_order)
- self.status_db.session.commit()
- return new_cases
-
- @staticmethod
- def _update_case_panel(panels: list[str], case: Case) -> None:
- """Update case panels."""
- case.panels = panels
-
- @staticmethod
- def _append_ticket(ticket_id: str, case: Case) -> None:
- """Add a ticket to the case."""
- case.tickets = f"{case.tickets},{ticket_id}"
-
- @staticmethod
- def _update_action(action: str, case: Case) -> None:
- """Update action of a case."""
- case.action = action
-
- @staticmethod
- def _update_relationship(father_obj, link_obj, mother_obj, sample):
- link_obj.status = sample["status"] or link_obj.status
- link_obj.mother = mother_obj or link_obj.mother
- link_obj.father = father_obj or link_obj.father
-
- def _create_link(self, case_obj, family_samples, father_obj, mother_obj, sample):
- link_obj = self.status_db.relate_sample(
- case=case_obj,
- sample=family_samples[sample["name"]],
- status=sample["status"],
- mother=mother_obj,
- father=father_obj,
- )
- self.status_db.session.add(link_obj)
- return link_obj
-
- def _create_sample(self, case, customer_obj, order, ordered, sample, ticket):
- sample_obj = self.status_db.add_sample(
- name=sample["name"],
- comment=sample["comment"],
- control=sample["control"],
- internal_id=sample["internal_id"],
- order=order,
- ordered=ordered,
- original_ticket=ticket,
- tumour=sample["tumour"],
- age_at_sampling=sample["age_at_sampling"],
- capture_kit=sample["capture_kit"],
- phenotype_groups=sample["phenotype_groups"],
- phenotype_terms=sample["phenotype_terms"],
- priority=case["priority"],
- reference_genome=sample["reference_genome"],
- sex=sample["sex"],
- subject_id=sample["subject_id"],
- )
- sample_obj.customer = customer_obj
- with self.status_db.session.no_autoflush:
- application_tag = sample["application"]
- sample_obj.application_version: ApplicationVersion = (
- self.status_db.get_current_application_version_by_tag(tag=application_tag)
- )
- self.status_db.session.add(sample_obj)
- return sample_obj
-
- def _create_case(self, case: dict, customer_obj: Customer, ticket: str):
- case_obj = self.status_db.add_case(
- cohorts=case["cohorts"],
- data_analysis=Workflow(case["data_analysis"]),
- data_delivery=DataDelivery(case["data_delivery"]),
- name=case["name"],
- priority=case["priority"],
- synopsis=case["synopsis"],
- ticket=ticket,
- )
- case_obj.customer = customer_obj
- return case_obj
-
- @staticmethod
- def _is_rerun_of_existing_case(sample: Of1508Sample) -> bool:
- return sample.case_internal_id is not None
diff --git a/cg/services/orders/store_order_services/store_fastq_order_service.py b/cg/services/orders/store_order_services/store_fastq_order_service.py
deleted file mode 100644
index 8699c7a4b4..0000000000
--- a/cg/services/orders/store_order_services/store_fastq_order_service.py
+++ /dev/null
@@ -1,150 +0,0 @@
-import logging
-from datetime import datetime
-
-from cg.constants import DataDelivery, GenePanelMasterList, Priority, Workflow
-from cg.constants.constants import CustomerId
-from cg.constants.sequencing import SeqLibraryPrepCategory
-from cg.exc import OrderError
-from cg.models.orders.order import OrderIn
-from cg.models.orders.sample_base import StatusEnum
-from cg.services.orders.order_lims_service.order_lims_service import OrderLimsService
-from cg.services.orders.submitters.order_submitter import StoreOrderService
-from cg.store.models import ApplicationVersion, Case, CaseSample, Customer, Order, Sample
-from cg.store.store import Store
-
-LOG = logging.getLogger(__name__)
-
-
-class StoreFastqOrderService(StoreOrderService):
- """Storing service for FASTQ orders."""
-
- def __init__(self, status_db: Store, lims_service: OrderLimsService):
- self.status_db = status_db
- self.lims = lims_service
-
- def store_order(self, order: OrderIn) -> dict:
- """Submit a batch of samples for FASTQ delivery."""
-
- project_data, lims_map = self.lims.process_lims(lims_order=order, new_samples=order.samples)
- status_data = self.order_to_status(order)
- self._fill_in_sample_ids(samples=status_data["samples"], lims_map=lims_map)
- new_samples = self.store_items_in_status(
- customer_id=status_data["customer"],
- order=status_data["order"],
- ordered=project_data["date"],
- ticket_id=order.ticket,
- items=status_data["samples"],
- )
- return {"project": project_data, "records": new_samples}
-
- @staticmethod
- def order_to_status(order: OrderIn) -> dict:
- """Convert order input to status for fastq-only orders."""
- status_data = {
- "customer": order.customer,
- "order": order.name,
- "samples": [
- {
- "application": sample.application,
- "capture_kit": sample.capture_kit,
- "comment": sample.comment,
- "data_analysis": sample.data_analysis,
- "data_delivery": sample.data_delivery,
- "name": sample.name,
- "priority": sample.priority,
- "sex": sample.sex,
- "subject_id": sample.subject_id,
- "tumour": sample.tumour,
- "volume": sample.volume,
- }
- for sample in order.samples
- ],
- }
- return status_data
-
- def create_maf_case(self, sample_obj: Sample, order: Order) -> None:
- """Add a MAF case to the Status database."""
- case: Case = self.status_db.add_case(
- data_analysis=Workflow(Workflow.MIP_DNA),
- data_delivery=DataDelivery(DataDelivery.NO_DELIVERY),
- name="_".join([sample_obj.name, "MAF"]),
- panels=[GenePanelMasterList.OMIM_AUTO],
- priority=Priority.research,
- ticket=sample_obj.original_ticket,
- )
- case.customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=CustomerId.CG_INTERNAL_CUSTOMER
- )
- relationship: CaseSample = self.status_db.relate_sample(
- case=case, sample=sample_obj, status=StatusEnum.unknown
- )
- order.cases.append(case)
- self.status_db.session.add_all([case, relationship])
-
- def store_items_in_status(
- self, customer_id: str, order: str, ordered: datetime, ticket_id: str, items: list[dict]
- ) -> list[Sample]:
- """Store fastq samples in the status database including family connection and delivery"""
- customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
- if not customer:
- raise OrderError(f"Unknown customer: {customer_id}")
- new_samples = []
- case: Case = self.status_db.get_case_by_name_and_customer(
- customer=customer, case_name=ticket_id
- )
- submitted_case: dict = items[0]
- status_db_order = Order(
- customer=customer,
- order_date=datetime.now(),
- ticket_id=int(ticket_id),
- )
- with self.status_db.session.no_autoflush:
- for sample in items:
- new_sample = self.status_db.add_sample(
- name=sample["name"],
- sex=sample["sex"] or "unknown",
- comment=sample["comment"],
- internal_id=sample.get("internal_id"),
- order=order,
- ordered=ordered,
- original_ticket=ticket_id,
- priority=sample["priority"],
- tumour=sample["tumour"],
- capture_kit=sample["capture_kit"],
- subject_id=sample["subject_id"],
- )
- new_sample.customer: Customer = customer
- application_tag: str = sample["application"]
- application_version: ApplicationVersion = (
- self.status_db.get_current_application_version_by_tag(tag=application_tag)
- )
- if application_version is None:
- raise OrderError(f"Invalid application: {sample['application']}")
- new_sample.application_version: ApplicationVersion = application_version
- new_samples.append(new_sample)
- if not case:
- case = self.status_db.add_case(
- data_analysis=Workflow(submitted_case["data_analysis"]),
- data_delivery=DataDelivery(submitted_case["data_delivery"]),
- name=ticket_id,
- panels=None,
- priority=submitted_case["priority"],
- ticket=ticket_id,
- )
- if (
- not new_sample.is_tumour
- and new_sample.prep_category == SeqLibraryPrepCategory.WHOLE_GENOME_SEQUENCING
- ):
- self.create_maf_case(sample_obj=new_sample, order=status_db_order)
- case.customer = customer
- new_relationship = self.status_db.relate_sample(
- case=case, sample=new_sample, status=StatusEnum.unknown
- )
- self.status_db.session.add_all([case, new_relationship])
- status_db_order.cases.append(case)
- self.status_db.session.add(status_db_order)
- self.status_db.session.add_all(new_samples)
- self.status_db.session.commit()
- return new_samples
diff --git a/cg/services/orders/store_order_services/store_metagenome_order.py b/cg/services/orders/store_order_services/store_metagenome_order.py
deleted file mode 100644
index 38c00e3ef2..0000000000
--- a/cg/services/orders/store_order_services/store_metagenome_order.py
+++ /dev/null
@@ -1,138 +0,0 @@
-import logging
-from datetime import datetime
-
-from cg.constants import DataDelivery, Sex, Workflow
-from cg.exc import OrderError
-from cg.models.orders.order import OrderIn
-from cg.models.orders.sample_base import StatusEnum
-from cg.services.orders.order_lims_service.order_lims_service import OrderLimsService
-from cg.services.orders.submitters.order_submitter import StoreOrderService
-from cg.store.models import (
- ApplicationVersion,
- Case,
- CaseSample,
- Customer,
- Order,
- Sample,
-)
-from cg.store.store import Store
-
-LOG = logging.getLogger(__name__)
-
-
-class StoreMetagenomeOrderService(StoreOrderService):
- """Storing service for metagenome orders."""
-
- def __init__(self, status_db: Store, lims_service: OrderLimsService):
- self.status_db = status_db
- self.lims = lims_service
-
- def store_order(self, order: OrderIn) -> dict:
- """Submit a batch of metagenome samples."""
- project_data, lims_map = self.lims.process_lims(lims_order=order, new_samples=order.samples)
- status_data = self.order_to_status(order)
- self._fill_in_sample_ids(samples=status_data["families"][0]["samples"], lims_map=lims_map)
- new_samples = self.store_items_in_status(
- customer_id=status_data["customer"],
- order=status_data["order"],
- ordered=project_data["date"],
- ticket_id=order.ticket,
- items=status_data["families"],
- )
- return {"project": project_data, "records": new_samples}
-
- @staticmethod
- def order_to_status(order: OrderIn) -> dict:
- """Convert order input to status for metagenome orders."""
- return {
- "customer": order.customer,
- "order": order.name,
- "families": [
- {
- "data_analysis": order.samples[0].data_analysis,
- "data_delivery": order.samples[0].data_delivery,
- "priority": order.samples[0].priority,
- "samples": [
- {
- "application": sample.application,
- "comment": sample.comment,
- "control": sample.control,
- "name": sample.name,
- "priority": sample.priority,
- "volume": sample.volume,
- }
- for sample in order.samples
- ],
- }
- ],
- }
-
- def store_items_in_status(
- self,
- customer_id: str,
- order: str,
- ordered: datetime,
- ticket_id: str,
- items: list[dict],
- ) -> list[Sample]:
- """Store samples in the status database."""
- customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
- if customer is None:
- raise OrderError(f"unknown customer: {customer_id}")
- new_samples = []
- case: Case = self.status_db.get_case_by_name_and_customer(
- customer=customer, case_name=str(ticket_id)
- )
- case_dict: dict = items[0]
- status_db_order = Order(
- customer=customer,
- order_date=datetime.now(),
- ticket_id=int(ticket_id),
- )
- with self.status_db.session.no_autoflush:
- for sample in case_dict["samples"]:
- new_sample = self.status_db.add_sample(
- name=sample["name"],
- sex=Sex.UNKNOWN,
- comment=sample["comment"],
- control=sample["control"],
- internal_id=sample.get("internal_id"),
- order=order,
- ordered=ordered,
- original_ticket=ticket_id,
- priority=sample["priority"],
- )
- new_sample.customer: Customer = customer
- application_tag: str = sample["application"]
- application_version: ApplicationVersion = (
- self.status_db.get_current_application_version_by_tag(tag=application_tag)
- )
- if application_version is None:
- raise OrderError(f"Invalid application: {sample['application']}")
- new_sample.application_version: ApplicationVersion = application_version
- new_samples.append(new_sample)
-
- if not case:
- case = self.status_db.add_case(
- data_analysis=Workflow(case_dict["data_analysis"]),
- data_delivery=DataDelivery(case_dict["data_delivery"]),
- name=str(ticket_id),
- panels=None,
- priority=case_dict["priority"],
- ticket=ticket_id,
- )
- case.customer = customer
- self.status_db.session.add(case)
- self.status_db.session.commit()
-
- new_relationship: CaseSample = self.status_db.relate_sample(
- case=case, sample=new_sample, status=StatusEnum.unknown
- )
- self.status_db.session.add(new_relationship)
- status_db_order.cases.append(case)
- self.status_db.session.add(status_db_order)
- self.status_db.session.add_all(new_samples)
- self.status_db.session.commit()
- return new_samples
diff --git a/cg/services/orders/store_order_services/store_microbial_fastq_order_service.py b/cg/services/orders/store_order_services/store_microbial_fastq_order_service.py
deleted file mode 100644
index 3ff3e68048..0000000000
--- a/cg/services/orders/store_order_services/store_microbial_fastq_order_service.py
+++ /dev/null
@@ -1,137 +0,0 @@
-from datetime import datetime
-
-from cg.constants import DataDelivery, SexOptions, Workflow
-from cg.models.orders.order import OrderIn
-from cg.models.orders.sample_base import StatusEnum
-from cg.services.orders.order_lims_service.order_lims_service import OrderLimsService
-from cg.services.orders.submitters.order_submitter import StoreOrderService
-from cg.store.exc import EntryNotFoundError
-from cg.store.models import Case, CaseSample, Customer, Order, Sample
-from cg.store.store import Store
-
-
-class StoreMicrobialFastqOrderService(StoreOrderService):
-
- def __init__(self, status_db: Store, lims_service: OrderLimsService):
- self.status_db = status_db
- self.lims = lims_service
-
- def store_order(self, order: OrderIn) -> dict:
- project_data, lims_map = self.lims.process_lims(lims_order=order, new_samples=order.samples)
- status_data: dict = self.order_to_status(order)
- self._fill_in_sample_ids(samples=status_data["samples"], lims_map=lims_map)
- new_samples: list[Sample] = self.store_items_in_status(
- customer_id=status_data["customer"],
- order=status_data["order"],
- ordered=project_data["date"] if project_data else datetime.now(),
- ticket_id=order.ticket,
- items=status_data["samples"],
- )
- return {"project": project_data, "records": new_samples}
-
- @staticmethod
- def order_to_status(order: OrderIn) -> dict:
- """Convert order input for microbial samples."""
- return {
- "customer": order.customer,
- "order": order.name,
- "comment": order.comment,
- "samples": [
- {
- "application": sample.application,
- "comment": sample.comment,
- "internal_id": sample.internal_id,
- "data_analysis": sample.data_analysis,
- "data_delivery": sample.data_delivery,
- "name": sample.name,
- "priority": sample.priority,
- "volume": sample.volume,
- "control": sample.control,
- }
- for sample in order.samples
- ],
- }
-
- def store_items_in_status(
- self,
- customer_id: str,
- order: str,
- ordered: datetime,
- ticket_id: str,
- items: list[dict],
- ) -> list[Sample]:
- customer: Customer = self._get_customer(customer_id)
- new_samples: list[Sample] = []
- status_db_order = Order(
- customer=customer,
- order_date=datetime.now(),
- ticket_id=int(ticket_id),
- )
- for sample in items:
- case_name: str = f'{sample["name"]}-case'
- case: Case = self._create_case_for_sample(
- sample=sample, customer=customer, case_name=case_name, ticket_id=ticket_id
- )
- db_sample: Sample = self._create_sample(
- sample_dict=sample,
- order=order,
- ordered=ordered,
- ticket_id=ticket_id,
- customer=customer,
- )
- db_sample = self._add_application_to_sample(
- sample=db_sample, application_tag=sample["application"]
- )
- case_sample: CaseSample = self.status_db.relate_sample(
- case=case, sample=db_sample, status=StatusEnum.unknown
- )
- status_db_order.cases.append(case)
- self.status_db.add_multiple_items_to_store([case, db_sample, case_sample])
- new_samples.append(db_sample)
- self.status_db.session.add(status_db_order)
- self.status_db.commit_to_store()
- return new_samples
-
- def _get_customer(self, customer_id: str) -> Customer:
- if customer := self.status_db.get_customer_by_internal_id(customer_id):
- return customer
- raise EntryNotFoundError(f"could not find customer: {customer_id}")
-
- def _create_case_for_sample(
- self, sample: dict, customer: Customer, case_name: str, ticket_id: str
- ) -> Case:
- if self.status_db.get_case_by_name_and_customer(case_name=case_name, customer=customer):
- raise ValueError(f"Case already exists: {case_name}.")
- case: Case = self.status_db.add_case(
- data_analysis=Workflow.RAW_DATA,
- data_delivery=DataDelivery.FASTQ,
- name=case_name,
- priority=sample["priority"],
- ticket=ticket_id,
- )
- case.customer = customer
- return case
-
- def _create_sample(
- self, sample_dict: dict, order, ordered, ticket_id: str, customer: Customer
- ) -> Sample:
-
- return self.status_db.add_sample(
- name=sample_dict["name"],
- customer=customer,
- sex=SexOptions.UNKNOWN,
- comment=sample_dict["comment"],
- internal_id=sample_dict["internal_id"],
- order=order,
- ordered=ordered,
- original_ticket=ticket_id,
- priority=sample_dict["priority"],
- )
-
- def _add_application_to_sample(self, sample: Sample, application_tag: str) -> Sample:
- if application_version := self.status_db.get_current_application_version_by_tag(
- tag=application_tag
- ):
- sample.application_version = application_version
- return sample
- raise EntryNotFoundError(f"Invalid application: {application_tag}")
diff --git a/cg/services/orders/store_order_services/store_microbial_order.py b/cg/services/orders/store_order_services/store_microbial_order.py
deleted file mode 100644
index 7d79f99fba..0000000000
--- a/cg/services/orders/store_order_services/store_microbial_order.py
+++ /dev/null
@@ -1,187 +0,0 @@
-import logging
-from datetime import datetime
-
-from cg.constants import DataDelivery, Sex, Workflow
-from cg.models.orders.order import OrderIn
-from cg.models.orders.samples import MicrobialSample
-from cg.services.orders.order_lims_service.order_lims_service import OrderLimsService
-from cg.services.orders.submitters.order_submitter import StoreOrderService
-from cg.store.models import (
- ApplicationVersion,
- Case,
- CaseSample,
- Customer,
- Order,
- Organism,
- Sample,
-)
-from cg.store.store import Store
-
-LOG = logging.getLogger(__name__)
-
-
-class StoreMicrobialOrderService(StoreOrderService):
- """
- Storing service for microbial orders.
- These include:
- - Mutant samples
- - Microsalt samples
- - Sars-Cov-2 samples
- """
-
- def __init__(self, status_db: Store, lims_service: OrderLimsService):
- self.status = status_db
- self.lims = lims_service
-
- def store_order(self, order: OrderIn) -> dict:
- self._fill_in_sample_verified_organism(order.samples)
- # submit samples to LIMS
- project_data, lims_map = self.lims.process_lims(lims_order=order, new_samples=order.samples)
- # prepare order for status database
- status_data = self.order_to_status(order)
- self._fill_in_sample_ids(
- samples=status_data["samples"], lims_map=lims_map, id_key="internal_id"
- )
-
- # submit samples to Status
- samples = self.store_items_in_status(
- customer_id=status_data["customer"],
- order=status_data["order"],
- ordered=project_data["date"] if project_data else datetime.now(),
- ticket_id=order.ticket,
- items=status_data["samples"],
- comment=status_data["comment"],
- data_analysis=Workflow(status_data["data_analysis"]),
- data_delivery=DataDelivery(status_data["data_delivery"]),
- )
- return {"project": project_data, "records": samples}
-
- @staticmethod
- def order_to_status(order: OrderIn) -> dict:
- """Convert order input for microbial samples."""
-
- status_data = {
- "customer": order.customer,
- "order": order.name,
- "comment": order.comment,
- "data_analysis": order.samples[0].data_analysis,
- "data_delivery": order.samples[0].data_delivery,
- "samples": [
- {
- "application": sample.application,
- "comment": sample.comment,
- "control": sample.control,
- "name": sample.name,
- "organism_id": sample.organism,
- "priority": sample.priority,
- "reference_genome": sample.reference_genome,
- "volume": sample.volume,
- }
- for sample in order.samples
- ],
- }
- return status_data
-
- def store_items_in_status(
- self,
- comment: str,
- customer_id: str,
- data_analysis: Workflow,
- data_delivery: DataDelivery,
- order: str,
- ordered: datetime,
- items: list[dict],
- ticket_id: str,
- ) -> [Sample]:
- """Store microbial samples in the status database."""
-
- sample_objs = []
-
- customer: Customer = self.status.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
- new_samples = []
- status_db_order = Order(
- customer=customer,
- order_date=datetime.now(),
- ticket_id=int(ticket_id),
- )
-
- with self.status.session.no_autoflush:
- for sample_data in items:
- case: Case = self.status.get_case_by_name_and_customer(
- customer=customer, case_name=ticket_id
- )
-
- if not case:
- case: Case = self.status.add_case(
- data_analysis=data_analysis,
- data_delivery=data_delivery,
- name=ticket_id,
- panels=None,
- ticket=ticket_id,
- )
- case.customer = customer
- self.status.session.add(case)
- self.status.session.commit()
-
- application_tag: str = sample_data["application"]
- application_version: ApplicationVersion = (
- self.status.get_current_application_version_by_tag(tag=application_tag)
- )
- organism: Organism = self.status.get_organism_by_internal_id(
- sample_data["organism_id"]
- )
-
- if not organism:
- organism: Organism = self.status.add_organism(
- internal_id=sample_data["organism_id"],
- name=sample_data["organism_id"],
- reference_genome=sample_data["reference_genome"],
- )
- self.status.session.add(organism)
- self.status.session.commit()
-
- if comment:
- case.comment = f"Order comment: {comment}"
-
- new_sample = self.status.add_sample(
- name=sample_data["name"],
- sex=Sex.UNKNOWN,
- comment=sample_data["comment"],
- control=sample_data["control"],
- internal_id=sample_data.get("internal_id"),
- order=order,
- ordered=ordered,
- original_ticket=ticket_id,
- priority=sample_data["priority"],
- application_version=application_version,
- customer=customer,
- organism=organism,
- reference_genome=sample_data["reference_genome"],
- )
-
- priority = new_sample.priority
- sample_objs.append(new_sample)
- link: CaseSample = self.status.relate_sample(
- case=case, sample=new_sample, status="unknown"
- )
- self.status.session.add(link)
- new_samples.append(new_sample)
-
- case.priority = priority
- status_db_order.cases.append(case)
- self.status.session.add(status_db_order)
- self.status.session.add_all(new_samples)
- self.status.session.commit()
- return sample_objs
-
- def _fill_in_sample_verified_organism(self, samples: list[MicrobialSample]):
- for sample in samples:
- organism_id = sample.organism
- reference_genome = sample.reference_genome
- organism: Organism = self.status.get_organism_by_internal_id(internal_id=organism_id)
- is_verified = (
- organism and organism.reference_genome == reference_genome and organism.verified
- )
- sample.verified_organism = is_verified
diff --git a/cg/services/orders/store_order_services/store_pacbio_order_service.py b/cg/services/orders/store_order_services/store_pacbio_order_service.py
deleted file mode 100644
index 66e1c6080e..0000000000
--- a/cg/services/orders/store_order_services/store_pacbio_order_service.py
+++ /dev/null
@@ -1,113 +0,0 @@
-import logging
-from datetime import datetime
-
-from cg.constants import DataDelivery, Workflow
-from cg.models.orders.order import OrderIn
-from cg.models.orders.sample_base import SexEnum, StatusEnum
-from cg.services.orders.order_lims_service.order_lims_service import OrderLimsService
-from cg.services.orders.submitters.order_submitter import StoreOrderService
-from cg.store.models import ApplicationVersion, CaseSample, Customer, Order, Sample
-from cg.store.store import Store
-
-LOG = logging.getLogger(__name__)
-
-
-class StorePacBioOrderService(StoreOrderService):
- """Storing service for PacBio Long Read orders."""
-
- def __init__(self, status_db: Store, lims_service: OrderLimsService):
- self.status_db = status_db
- self.lims = lims_service
-
- def store_order(self, order: OrderIn) -> dict:
- """Submit a batch of samples for PacBio Long Read delivery."""
-
- project_data, lims_map = self.lims.process_lims(lims_order=order, new_samples=order.samples)
- status_data: dict = self.order_to_status(order)
- self._fill_in_sample_ids(samples=status_data["samples"], lims_map=lims_map)
- new_samples = self._store_samples_in_statusdb(
- customer_id=status_data["customer"],
- order=status_data["order"],
- ordered=project_data["date"],
- ticket_id=order.ticket,
- samples=status_data["samples"],
- )
- return {"project": project_data, "records": new_samples}
-
- @staticmethod
- def order_to_status(order: OrderIn) -> dict:
- """Convert order input to status for PacBio-only orders."""
- status_data = {
- "customer": order.customer,
- "order": order.name,
- "samples": [
- {
- "application": sample.application,
- "comment": sample.comment,
- "data_analysis": sample.data_analysis,
- "data_delivery": sample.data_delivery,
- "name": sample.name,
- "priority": sample.priority,
- "sex": sample.sex,
- "tumour": sample.tumour,
- "volume": sample.volume,
- "subject_id": sample.subject_id,
- }
- for sample in order.samples
- ],
- }
- return status_data
-
- def _store_samples_in_statusdb(
- self, customer_id: str, order: str, ordered: datetime, ticket_id: str, samples: list[dict]
- ) -> list[Sample]:
- """Store PacBio samples and cases in the StatusDB."""
- customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
- status_db_order = Order(
- customer=customer,
- order_date=datetime.now(),
- ticket_id=int(ticket_id),
- )
- new_samples = []
- with self.status_db.session.no_autoflush:
- for sample in samples:
- sample_name: str = sample["name"]
- new_sample = self.status_db.add_sample(
- name=sample_name,
- sex=sample["sex"] or SexEnum.unknown,
- comment=sample["comment"],
- internal_id=sample.get("internal_id"),
- order=order,
- ordered=ordered,
- original_ticket=ticket_id,
- priority=sample["priority"],
- tumour=sample["tumour"],
- subject_id=sample["subject_id"],
- )
- new_sample.customer = customer
- application_tag: str = sample["application"]
- application_version: ApplicationVersion = (
- self.status_db.get_current_application_version_by_tag(tag=application_tag)
- )
- new_sample.application_version = application_version
- new_samples.append(new_sample)
- case = self.status_db.add_case(
- data_analysis=Workflow(sample["data_analysis"]),
- data_delivery=DataDelivery(sample["data_delivery"]),
- name=f"{sample_name}-case",
- priority=sample["priority"],
- ticket=ticket_id,
- )
- case.customer = customer
- new_relationship: CaseSample = self.status_db.relate_sample(
- case=case, sample=new_sample, status=StatusEnum.unknown
- )
- status_db_order.cases.append(case)
- self.status_db.session.add_all([case, new_relationship])
-
- self.status_db.session.add(status_db_order)
- self.status_db.session.add_all(new_samples)
- self.status_db.session.commit()
- return new_samples
diff --git a/cg/services/orders/store_order_services/store_pool_order.py b/cg/services/orders/store_order_services/store_pool_order.py
deleted file mode 100644
index 6cb6d13a4c..0000000000
--- a/cg/services/orders/store_order_services/store_pool_order.py
+++ /dev/null
@@ -1,210 +0,0 @@
-import logging
-from datetime import datetime
-
-from cg.constants import DataDelivery, Workflow
-from cg.exc import OrderError
-from cg.models.orders.order import OrderIn
-from cg.models.orders.sample_base import SexEnum
-from cg.models.orders.samples import RmlSample
-from cg.services.orders.order_lims_service.order_lims_service import OrderLimsService
-from cg.services.orders.submitters.order_submitter import StoreOrderService
-from cg.store.models import (
- ApplicationVersion,
- Case,
- CaseSample,
- Customer,
- Order,
- Pool,
- Sample,
-)
-from cg.store.store import Store
-
-LOG = logging.getLogger(__name__)
-
-
-class StorePoolOrderService(StoreOrderService):
- """
- Storing service for pool orders.
- These include:
- - Fluffy / NIPT samples
- - RML samples
- """
-
- def __init__(self, status_db: Store, lims_service: OrderLimsService):
- self.status_db = status_db
- self.lims = lims_service
-
- def store_order(self, order: OrderIn) -> dict:
- status_data = self.order_to_status(order)
- project_data, lims_map = self.lims.process_lims(lims_order=order, new_samples=order.samples)
- samples = [sample for pool in status_data["pools"] for sample in pool["samples"]]
- self._fill_in_sample_ids(samples=samples, lims_map=lims_map, id_key="internal_id")
- new_records = self.store_items_in_status(
- customer_id=status_data["customer"],
- order=status_data["order"],
- ordered=project_data["date"],
- ticket_id=order.ticket,
- items=status_data["pools"],
- )
- return {"project": project_data, "records": new_records}
-
- @staticmethod
- def order_to_status(order: OrderIn) -> dict:
- """Convert input to pools"""
-
- status_data = {
- "customer": order.customer,
- "order": order.name,
- "comment": order.comment,
- "pools": [],
- }
-
- # group pools
- pools = {}
-
- for sample in order.samples:
- pool_name = sample.pool
- application = sample.application
- data_analysis = sample.data_analysis
- data_delivery = sample.data_delivery
- priority = sample.priority
-
- if pool_name not in pools:
- pools[pool_name] = {}
- pools[pool_name]["name"] = pool_name
- pools[pool_name]["applications"] = set()
- pools[pool_name]["priorities"] = set()
- pools[pool_name]["samples"] = []
-
- pools[pool_name]["samples"].append(sample)
- pools[pool_name]["applications"].add(application)
- pools[pool_name]["priorities"].add(priority)
-
- # each pool must only have same of some values
- for pool in pools.values():
- applications = pool["applications"]
- priorities = pool["priorities"]
- pool_name = pool["name"]
- if len(applications) > 1:
- raise OrderError(f"different applications in pool: {pool_name} - {applications}")
- if len(priorities) > 1:
- raise OrderError(f"different priorities in pool: {pool_name} - {priorities}")
-
- for pool in pools.values():
- pool_name = pool["name"]
- applications = pool["applications"]
- application = applications.pop()
- pool_samples = pool["samples"]
- priorities = pool["priorities"]
- priority = priorities.pop()
-
- status_data["pools"].append(
- {
- "name": pool_name,
- "application": application,
- "data_analysis": data_analysis,
- "data_delivery": data_delivery,
- "priority": priority,
- "samples": [
- {
- "comment": sample.comment,
- "control": sample.control,
- "name": sample.name,
- }
- for sample in pool_samples
- ],
- }
- )
- return status_data
-
- def store_items_in_status(
- self, customer_id: str, order: str, ordered: datetime, ticket_id: str, items: list[dict]
- ) -> list[Pool]:
- """Store pools in the status database."""
- customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
- status_db_order = Order(
- customer=customer,
- order_date=datetime.now(),
- ticket_id=int(ticket_id),
- )
- new_pools: list[Pool] = []
- new_samples: list[Sample] = []
- for pool in items:
- with self.status_db.session.no_autoflush:
- application_version: ApplicationVersion = (
- self.status_db.get_current_application_version_by_tag(tag=pool["application"])
- )
- priority: str = pool["priority"]
- case_name: str = self.create_case_name(ticket=ticket_id, pool_name=pool["name"])
- case: Case = self.status_db.get_case_by_name_and_customer(
- customer=customer, case_name=case_name
- )
- if not case:
- data_analysis: Workflow = Workflow(pool["data_analysis"])
- data_delivery: DataDelivery = DataDelivery(pool["data_delivery"])
- case = self.status_db.add_case(
- data_analysis=data_analysis,
- data_delivery=data_delivery,
- name=case_name,
- panels=None,
- priority=priority,
- ticket=ticket_id,
- )
- case.customer = customer
- self.status_db.session.add(case)
-
- new_pool: Pool = self.status_db.add_pool(
- application_version=application_version,
- customer=customer,
- name=pool["name"],
- order=order,
- ordered=ordered,
- ticket=ticket_id,
- )
- sex: SexEnum = SexEnum.unknown
- for sample in pool["samples"]:
- new_sample = self.status_db.add_sample(
- name=sample["name"],
- sex=sex,
- comment=sample["comment"],
- control=sample.get("control"),
- internal_id=sample.get("internal_id"),
- order=order,
- ordered=ordered,
- original_ticket=ticket_id,
- priority=priority,
- application_version=application_version,
- customer=customer,
- no_invoice=True,
- )
- new_samples.append(new_sample)
- link: CaseSample = self.status_db.relate_sample(
- case=case, sample=new_sample, status="unknown"
- )
- self.status_db.session.add(link)
- status_db_order.cases.append(case)
- new_pools.append(new_pool)
- self.status_db.session.add(status_db_order)
- self.status_db.session.add_all(new_pools)
- self.status_db.session.commit()
- return new_pools
-
- def _validate_case_names_are_available(
- self, customer_id: str, samples: list[RmlSample], ticket: str
- ):
- """Validate names of all samples are not already in use."""
- customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
- for sample in samples:
- case_name: str = self.create_case_name(pool_name=sample.pool, ticket=ticket)
- if self.status_db.get_case_by_name_and_customer(customer=customer, case_name=case_name):
- raise OrderError(
- f"Case name {case_name} already in use for customer {customer.name}"
- )
-
- @staticmethod
- def create_case_name(ticket: str, pool_name: str) -> str:
- return f"{ticket}-{pool_name}"
diff --git a/cg/services/orders/storing/__init__.py b/cg/services/orders/storing/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/storing/constants.py b/cg/services/orders/storing/constants.py
new file mode 100644
index 0000000000..7adf027cf1
--- /dev/null
+++ b/cg/services/orders/storing/constants.py
@@ -0,0 +1,2 @@
+# Hardcoded ID of the special order that MAF cases are assigned to
+MAF_ORDER_ID: int = 12377
diff --git a/cg/services/orders/storing/implementations/__init__.py b/cg/services/orders/storing/implementations/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/storing/implementations/case_order_service.py b/cg/services/orders/storing/implementations/case_order_service.py
new file mode 100644
index 0000000000..f2f8808a19
--- /dev/null
+++ b/cg/services/orders/storing/implementations/case_order_service.py
@@ -0,0 +1,229 @@
+import logging
+from datetime import datetime
+
+from cg.constants.constants import CaseActions, DataDelivery, Workflow
+from cg.constants.pedigree import Pedigree
+from cg.services.orders.constants import ORDER_TYPE_WORKFLOW_MAP
+from cg.services.orders.lims_service.service import OrderLimsService
+from cg.services.orders.storing.service import StoreOrderService
+from cg.services.orders.validation.models.case import Case
+from cg.services.orders.validation.models.existing_case import ExistingCase
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.models.sample_aliases import SampleInCase
+from cg.store.models import ApplicationVersion
+from cg.store.models import Case as DbCase
+from cg.store.models import CaseSample, Customer
+from cg.store.models import Order as DbOrder
+from cg.store.models import Sample as DbSample
+from cg.store.store import Store
+
+LOG = logging.getLogger(__name__)
+
+
+class StoreCaseOrderService(StoreOrderService):
+ """
+    Service for storing generic orders in StatusDB and LIMS.
+ This class is used to store orders for the following workflows:
+ - Balsamic
+ - Balsamic UMI
+ - MIP DNA
+ - MIP RNA
+ - RNAFusion
+ - Tomte
+ """
+
+ def __init__(
+ self,
+ status_db: Store,
+ lims_service: OrderLimsService,
+ ):
+ self.status_db = status_db
+ self.lims = lims_service
+
+ def store_order(self, order: OrderWithCases) -> dict:
+ """Submit a batch of samples for sequencing and analysis."""
+ project_data = lims_map = None
+ if new_samples := [sample for _, _, sample in order.enumerated_new_samples]:
+ project_data, lims_map = self.lims.process_lims(
+ samples=new_samples,
+ customer=order.customer,
+ ticket=order._generated_ticket_id,
+ order_name=order.name,
+ workflow=ORDER_TYPE_WORKFLOW_MAP[order.order_type],
+ delivery_type=order.delivery_type,
+ skip_reception_control=order.skip_reception_control,
+ )
+ if lims_map:
+ self._fill_in_sample_ids(samples=new_samples, lims_map=lims_map)
+
+ new_cases: list[DbCase] = self.store_order_data_in_status_db(order)
+ return {"project": project_data, "records": new_cases}
+
+ def store_order_data_in_status_db(self, order: OrderWithCases) -> list[DbCase]:
+ """Store cases, samples and their relationship in the Status database."""
+ new_cases: list[DbCase] = []
+ db_order = self._create_db_order(order)
+ for case in order.cases:
+ if case.is_new:
+ db_case: DbCase = self._create_db_case(
+ case=case,
+ customer=db_order.customer,
+ ticket=str(order._generated_ticket_id),
+ workflow=ORDER_TYPE_WORKFLOW_MAP[order.order_type],
+ delivery_type=order.delivery_type,
+ )
+ new_cases.append(db_case)
+ self._update_case_panel(panels=getattr(case, "panels", []), case=db_case)
+ case_samples: dict[str, DbSample] = self._create_db_sample_dict(
+ case=case, order=order, customer=db_order.customer
+ )
+ self._create_links(case=case, db_case=db_case, case_samples=case_samples)
+
+ else:
+ db_case: DbCase = self._update_existing_case(
+ existing_case=case, ticket_id=order._generated_ticket_id
+ )
+
+ db_order.cases.append(db_case)
+ self.status_db.session.add_all(new_cases)
+ self.status_db.session.add(db_order)
+ self.status_db.session.commit()
+ return new_cases
+
+ @staticmethod
+ def _update_case_panel(panels: list[str], case: DbCase) -> None:
+ """Update case panels."""
+ case.panels = panels
+
+ @staticmethod
+ def _append_ticket(ticket_id: str, case: DbCase) -> None:
+ """Add a ticket to the case."""
+ case.tickets = f"{case.tickets},{ticket_id}"
+
+ @staticmethod
+ def _update_action(action: str, case: DbCase) -> None:
+ """Update action of a case."""
+ case.action = action
+
+ def _create_link(
+ self,
+ case: DbCase,
+ db_sample: DbSample,
+ father: DbSample,
+ mother: DbSample,
+ sample: SampleInCase,
+ ) -> CaseSample:
+ return self.status_db.relate_sample(
+ case=case,
+ sample=db_sample,
+ status=getattr(sample, "status", None),
+ mother=mother,
+ father=father,
+ )
+
+ def _create_db_sample(
+ self,
+ case: Case,
+ customer: Customer,
+ order_name: str,
+ ordered: datetime,
+ sample: SampleInCase,
+ ticket: str,
+ ):
+ application_tag = sample.application
+ application_version: ApplicationVersion = (
+ self.status_db.get_current_application_version_by_tag(tag=application_tag)
+ )
+ db_sample: DbSample = self.status_db.add_sample(
+ application_version=application_version,
+ internal_id=sample._generated_lims_id,
+ order=order_name,
+ ordered=ordered,
+ original_ticket=ticket,
+ priority=case.priority,
+ **sample.model_dump(exclude={"application", "container", "container_name"}),
+ )
+ db_sample.customer = customer
+ self.status_db.session.add(db_sample)
+ return db_sample
+
+ def _create_db_case(
+ self,
+ case: Case,
+ customer: Customer,
+ ticket: str,
+ workflow: Workflow,
+ delivery_type: DataDelivery,
+ ) -> DbCase:
+ db_case: DbCase = self.status_db.add_case(
+ ticket=ticket,
+ data_analysis=workflow,
+ data_delivery=delivery_type,
+ **case.model_dump(exclude={"samples"}),
+ )
+ db_case.customer = customer
+ return db_case
+
+ def _create_db_order(self, order: OrderWithCases) -> DbOrder:
+ customer: Customer = self.status_db.get_customer_by_internal_id(
+ customer_internal_id=order.customer
+ )
+ return DbOrder(
+ customer=customer,
+ order_date=datetime.now(),
+ ticket_id=order._generated_ticket_id,
+ )
+
+ def _update_existing_case(self, existing_case: ExistingCase, ticket_id: int) -> DbCase:
+ status_db_case = self.status_db.get_case_by_internal_id(existing_case.internal_id)
+ self._append_ticket(ticket_id=str(ticket_id), case=status_db_case)
+ self._update_action(action=CaseActions.ANALYZE, case=status_db_case)
+ self._update_case_panel(panels=getattr(existing_case, "panels", []), case=status_db_case)
+ return status_db_case
+
+ def _create_links(self, case: Case, db_case: DbCase, case_samples: dict[str, DbSample]) -> None:
+ """Creates entries in the CaseSample table.
+ Input:
+ - case: Case, a case within the customer submitted order.
+ - db_case: DbCase, Database case entry corresponding to the 'case' parameter.
+ - case_samples: dict with keys being sample names in the provided 'case' and values being
+ the corresponding database entries in the Sample table."""
+ for sample in case.samples:
+ if sample.is_new:
+ db_sample: DbSample = case_samples.get(sample.name)
+ else:
+ db_sample: DbSample = self.status_db.get_sample_by_internal_id(sample.internal_id)
+ sample_mother_name: str | None = getattr(sample, Pedigree.MOTHER, None)
+ db_sample_mother: DbSample | None = case_samples.get(sample_mother_name)
+            sample_father_name: str | None = getattr(sample, Pedigree.FATHER, None)
+ db_sample_father: DbSample | None = case_samples.get(sample_father_name)
+ case_sample: CaseSample = self._create_link(
+ case=db_case,
+ db_sample=db_sample,
+ father=db_sample_father,
+ mother=db_sample_mother,
+ sample=sample,
+ )
+ self.status_db.add_item_to_store(case_sample)
+
+ def _create_db_sample_dict(
+ self, case: Case, order: OrderWithCases, customer: Customer
+ ) -> dict[str, DbSample]:
+ """Constructs a dict containing all the samples in the case. Keys are sample names
+ and the values are the database entries for the samples."""
+ case_samples: dict[str, DbSample] = {}
+ for sample in case.samples:
+ if sample.is_new:
+ with self.status_db.session.no_autoflush:
+ db_sample: DbSample = self._create_db_sample(
+ case=case,
+ customer=customer,
+ order_name=order.name,
+ ordered=datetime.now(),
+ sample=sample,
+ ticket=str(order._generated_ticket_id),
+ )
+ else:
+ db_sample: DbSample = self.status_db.get_sample_by_internal_id(sample.internal_id)
+ case_samples[db_sample.name] = db_sample
+ return case_samples
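
The pedigree wiring above works in two passes: `_create_db_sample_dict` builds a name-to-database-sample map, and `_create_links` resolves each sample's mother and father through that map, so an unset or unknown parent name simply resolves to `None`. A minimal, self-contained sketch of that lookup pattern (the stub type here is illustrative, not the real cg models):

```python
from dataclasses import dataclass


@dataclass
class StubSample:
    name: str
    mother: str | None = None  # parents are referenced by sample name, as in the service
    father: str | None = None


def resolve_parent_links(samples: list[StubSample]) -> list[tuple[str, str | None, str | None]]:
    """Mimic the name -> db-sample lookup: unknown or unset parents become None."""
    by_name = {sample.name: sample for sample in samples}
    links = []
    for sample in samples:
        mother = by_name.get(sample.mother)  # dict.get(None) -> None, same trick as the service
        father = by_name.get(sample.father)
        links.append(
            (sample.name, mother.name if mother else None, father.name if father else None)
        )
    return links


if __name__ == "__main__":
    trio = [
        StubSample(name="child", mother="mum", father="dad"),
        StubSample(name="mum"),
        StubSample(name="dad"),
    ]
    print(resolve_parent_links(trio))
    # [('child', 'mum', 'dad'), ('mum', None, None), ('dad', None, None)]
```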
diff --git a/cg/services/orders/storing/implementations/fastq_order_service.py b/cg/services/orders/storing/implementations/fastq_order_service.py
new file mode 100644
index 0000000000..ea6df4656b
--- /dev/null
+++ b/cg/services/orders/storing/implementations/fastq_order_service.py
@@ -0,0 +1,146 @@
+import logging
+from datetime import datetime
+
+from cg.constants import DataDelivery, GenePanelMasterList, Priority, Workflow
+from cg.constants.constants import CustomerId
+from cg.constants.sequencing import SeqLibraryPrepCategory
+from cg.models.orders.sample_base import SexEnum, StatusEnum
+from cg.services.orders.constants import ORDER_TYPE_WORKFLOW_MAP
+from cg.services.orders.lims_service.service import OrderLimsService
+from cg.services.orders.storing.constants import MAF_ORDER_ID
+from cg.services.orders.storing.service import StoreOrderService
+from cg.services.orders.validation.workflows.fastq.models.order import FastqOrder
+from cg.services.orders.validation.workflows.fastq.models.sample import FastqSample
+from cg.store.models import ApplicationVersion, Case, CaseSample, Customer, Order, Sample
+from cg.store.store import Store
+
+LOG = logging.getLogger(__name__)
+
+
+class StoreFastqOrderService(StoreOrderService):
+ """Storing service for FASTQ orders."""
+
+ def __init__(self, status_db: Store, lims_service: OrderLimsService):
+ self.status_db = status_db
+ self.lims = lims_service
+
+ def store_order(self, order: FastqOrder) -> dict:
+ """Submit a batch of samples for FASTQ delivery."""
+ project_data, lims_map = self.lims.process_lims(
+ samples=order.samples,
+ ticket=order._generated_ticket_id,
+ order_name=order.name,
+ workflow=Workflow.RAW_DATA,
+ customer=order.customer,
+ delivery_type=DataDelivery(order.delivery_type),
+ skip_reception_control=order.skip_reception_control,
+ )
+ self._fill_in_sample_ids(samples=order.samples, lims_map=lims_map)
+ new_samples: list[Sample] = self.store_order_data_in_status_db(order=order)
+ return {"records": new_samples, "project": project_data}
+
+ def store_order_data_in_status_db(self, order: FastqOrder) -> list[Sample]:
+ """
+ Store all order data in the Status database for a FASTQ order. Return the samples.
+ The stored data objects are:
+ - Order
+ - Samples
+ - One Case containing all samples
+ - For each Sample, a relationship between the sample and the Case
+ - For each non-tumour WGS Sample, a MAF Case and a relationship between the Sample and the
+ MAF Case
+ """
+ db_order: Order = self._create_db_order(order=order)
+ db_case: Case = self._create_db_case(order=order, db_order=db_order)
+ new_samples = []
+ with self.status_db.session.no_autoflush:
+ for sample in order.samples:
+ db_sample: Sample = self._create_db_sample(
+ sample=sample,
+ order_name=order.name,
+ ticket_id=str(db_order.ticket_id),
+ customer=db_order.customer,
+ )
+ self._create_maf_case(db_sample=db_sample, db_order=db_order, db_case=db_case)
+ case_sample: CaseSample = self.status_db.relate_sample(
+ case=db_case, sample=db_sample, status=StatusEnum.unknown
+ )
+ self.status_db.add_multiple_items_to_store([db_sample, case_sample])
+ new_samples.append(db_sample)
+ db_order.cases.append(db_case)
+ self.status_db.add_multiple_items_to_store([db_order, db_case])
+ self.status_db.commit_to_store()
+ return new_samples
+
+ def _create_db_order(self, order: FastqOrder) -> Order:
+ """Return an Order database object."""
+ ticket_id: int = order._generated_ticket_id
+ customer: Customer = self.status_db.get_customer_by_internal_id(
+ customer_internal_id=order.customer
+ )
+ return self.status_db.add_order(customer=customer, ticket_id=ticket_id)
+
+ def _create_db_case(self, order: FastqOrder, db_order: Order) -> Case:
+ """Return a Case database object."""
+ priority: str = order.samples[0].priority
+ case: Case = self.status_db.add_case(
+ data_analysis=ORDER_TYPE_WORKFLOW_MAP[order.order_type],
+ data_delivery=DataDelivery(order.delivery_type),
+ name=str(db_order.ticket_id),
+ priority=priority,
+ ticket=str(db_order.ticket_id),
+ )
+ case.customer = db_order.customer
+ return case
+
+ def _create_db_sample(
+ self, sample: FastqSample, order_name: str, customer: Customer, ticket_id: str
+ ) -> Sample:
+ """Return a Sample database object."""
+ application_version: ApplicationVersion = (
+ self.status_db.get_current_application_version_by_tag(tag=sample.application)
+ )
+ return self.status_db.add_sample(
+ name=sample.name,
+ sex=sample.sex or SexEnum.unknown,
+ comment=sample.comment,
+ internal_id=sample._generated_lims_id,
+ ordered=datetime.now(),
+ original_ticket=ticket_id,
+ priority=sample.priority,
+ tumour=sample.tumour,
+ capture_kit=sample.capture_kit,
+ subject_id=sample.subject_id,
+ customer=customer,
+ application_version=application_version,
+ order=order_name,
+ )
+
+ def _create_maf_case(self, db_sample: Sample, db_order: Order, db_case: Case) -> None:
+ """
+ Add a MAF case and a relationship with the given sample to the current Status database
+ transaction. This is done only if the given sample is non-tumour and WGS.
+ This function does not commit to the database.
+ """
+ if (
+ not db_sample.is_tumour
+ and db_sample.prep_category == SeqLibraryPrepCategory.WHOLE_GENOME_SEQUENCING
+ ):
+ maf_order: Order = self.status_db.get_order_by_id(MAF_ORDER_ID)
+ maf_case: Case = self.status_db.add_case(
+ comment=f"MAF case for {db_case.internal_id} original order id {db_order.id}",
+ data_analysis=Workflow.MIP_DNA,
+ data_delivery=DataDelivery.NO_DELIVERY,
+ name="_".join([db_sample.name, "MAF"]),
+ panels=[GenePanelMasterList.OMIM_AUTO],
+ priority=Priority.research,
+ ticket=db_sample.original_ticket,
+ )
+ maf_case.customer = self.status_db.get_customer_by_internal_id(
+ customer_internal_id=CustomerId.CG_INTERNAL_CUSTOMER
+ )
+ maf_case_sample: CaseSample = self.status_db.relate_sample(
+ case=maf_case, sample=db_sample, status=StatusEnum.unknown
+ )
+ maf_order.cases.append(maf_case)
+ self.status_db.add_multiple_items_to_store([maf_case, maf_case_sample])
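
`_create_maf_case` only fires for samples that are non-tumour and whole-genome prepped; every other sample passes through without a MAF case. A minimal sketch of that gate, with stub values standing in for the real `Sample` model and the `SeqLibraryPrepCategory` enum:

```python
from dataclasses import dataclass

WHOLE_GENOME_SEQUENCING = "wgs"  # stand-in for SeqLibraryPrepCategory.WHOLE_GENOME_SEQUENCING


@dataclass
class StubSample:
    name: str
    is_tumour: bool
    prep_category: str


def needs_maf_case(sample: StubSample) -> bool:
    """Same condition as StoreFastqOrderService._create_maf_case."""
    return not sample.is_tumour and sample.prep_category == WHOLE_GENOME_SEQUENCING


if __name__ == "__main__":
    samples = [
        StubSample("normal-wgs", is_tumour=False, prep_category="wgs"),
        StubSample("tumour-wgs", is_tumour=True, prep_category="wgs"),
        StubSample("normal-wes", is_tumour=False, prep_category="wes"),
    ]
    for sample in samples:
        print(sample.name, needs_maf_case(sample))  # only normal-wgs -> True
```

When the gate passes, the service names the new case `<sample name>_MAF` and attaches it to the hardcoded `MAF_ORDER_ID` order rather than the customer's own order.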
diff --git a/cg/services/orders/storing/implementations/metagenome_order_service.py b/cg/services/orders/storing/implementations/metagenome_order_service.py
new file mode 100644
index 0000000000..325a049c5a
--- /dev/null
+++ b/cg/services/orders/storing/implementations/metagenome_order_service.py
@@ -0,0 +1,106 @@
+import logging
+from datetime import datetime
+
+from cg.constants import DataDelivery, Sex
+from cg.models.orders.sample_base import PriorityEnum, StatusEnum
+from cg.services.orders.constants import ORDER_TYPE_WORKFLOW_MAP
+from cg.services.orders.lims_service.service import OrderLimsService
+from cg.services.orders.storing.service import StoreOrderService
+from cg.services.orders.validation.workflows.metagenome.models.order import MetagenomeOrder
+from cg.services.orders.validation.workflows.metagenome.models.sample import MetagenomeSample
+from cg.services.orders.validation.workflows.taxprofiler.models.order import TaxprofilerOrder
+from cg.services.orders.validation.workflows.taxprofiler.models.sample import TaxprofilerSample
+from cg.store.models import ApplicationVersion
+from cg.store.models import Case as DbCase
+from cg.store.models import CaseSample, Customer
+from cg.store.models import Order as DbOrder
+from cg.store.models import Sample as DbSample
+from cg.store.store import Store
+
+LOG = logging.getLogger(__name__)
+
+OrderMetagenome = MetagenomeOrder | TaxprofilerOrder
+SampleMetagenome = MetagenomeSample | TaxprofilerSample
+
+
+class StoreMetagenomeOrderService(StoreOrderService):
+ """Storing service for Metagenome or Taxprofiler orders."""
+
+ def __init__(self, status_db: Store, lims_service: OrderLimsService):
+ self.status_db = status_db
+ self.lims = lims_service
+
+ def store_order(self, order: OrderMetagenome) -> dict:
+ """Submit a batch of metagenome samples."""
+ project_data, lims_map = self.lims.process_lims(
+ samples=order.samples,
+ customer=order.customer,
+ ticket=order._generated_ticket_id,
+ order_name=order.name,
+ workflow=ORDER_TYPE_WORKFLOW_MAP[order.order_type],
+ delivery_type=DataDelivery(order.delivery_type),
+ skip_reception_control=order.skip_reception_control,
+ )
+ self._fill_in_sample_ids(samples=order.samples, lims_map=lims_map)
+ new_samples = self.store_order_data_in_status_db(order)
+ return {"project": project_data, "records": new_samples}
+
+ def store_order_data_in_status_db(
+ self,
+ order: OrderMetagenome,
+ ) -> list[DbSample]:
+ """Store samples in the StatusDB database."""
+ customer: Customer = self.status_db.get_customer_by_internal_id(order.customer)
+ new_samples = []
+ db_order: DbOrder = self.status_db.add_order(
+ customer=customer, ticket_id=order._generated_ticket_id
+ )
+ priority: PriorityEnum = order.samples[0].priority
+ db_case = self._create_db_case(order=order, customer=customer, priority=priority)
+ db_order.cases.append(db_case)
+ with self.status_db.session.no_autoflush:
+ for sample in order.samples:
+ db_sample = self._create_db_sample(order=order, sample=sample, customer=customer)
+ new_relationship: CaseSample = self.status_db.relate_sample(
+ case=db_case, sample=db_sample, status=StatusEnum.unknown
+ )
+ self.status_db.add_item_to_store(new_relationship)
+ new_samples.append(db_sample)
+ self.status_db.add_item_to_store(db_case)
+ self.status_db.add_item_to_store(db_order)
+ self.status_db.add_multiple_items_to_store(new_samples)
+ self.status_db.commit_to_store()
+ return new_samples
+
+ def _create_db_case(
+ self, order: OrderMetagenome, customer: Customer, priority: PriorityEnum
+ ) -> DbCase:
+ db_case: DbCase = self.status_db.add_case(
+ data_analysis=ORDER_TYPE_WORKFLOW_MAP[order.order_type],
+ data_delivery=DataDelivery(order.delivery_type),
+ name=str(order._generated_ticket_id),
+ priority=priority,
+ ticket=str(order._generated_ticket_id),
+ )
+ db_case.customer = customer
+ return db_case
+
+ def _create_db_sample(
+ self, sample: SampleMetagenome, order: OrderMetagenome, customer: Customer
+ ) -> DbSample:
+ db_sample: DbSample = self.status_db.add_sample(
+ name=sample.name,
+ sex=Sex.UNKNOWN,
+ comment=sample.comment,
+ control=sample.control,
+ order=order.name,
+ ordered=datetime.now(),
+ original_ticket=order._generated_ticket_id,
+ priority=sample.priority,
+ )
+ db_sample.customer = customer
+ application_version: ApplicationVersion = (
+ self.status_db.get_current_application_version_by_tag(sample.application)
+ )
+ db_sample.application_version = application_version
+ return db_sample
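
In contrast to the per-sample cases used elsewhere, the metagenome service groups every sample in the order into a single case named after the ticket, with the case priority taken from the first sample. A rough sketch of that grouping using plain dicts instead of StatusDB models:

```python
def group_order_into_single_case(ticket_id: int, samples: list[dict]) -> dict:
    """One case per order: the case is named after the ticket and holds all samples."""
    return {
        "name": str(ticket_id),
        "priority": samples[0]["priority"],  # first sample's priority, as in the service
        "samples": [sample["name"] for sample in samples],
    }


if __name__ == "__main__":
    case = group_order_into_single_case(
        ticket_id=123456,
        samples=[
            {"name": "meta-1", "priority": "standard"},
            {"name": "meta-2", "priority": "standard"},
        ],
    )
    print(case)  # {'name': '123456', 'priority': 'standard', 'samples': ['meta-1', 'meta-2']}
```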
diff --git a/cg/services/orders/storing/implementations/microbial_fastq_order_service.py b/cg/services/orders/storing/implementations/microbial_fastq_order_service.py
new file mode 100644
index 0000000000..69dd9cdfdd
--- /dev/null
+++ b/cg/services/orders/storing/implementations/microbial_fastq_order_service.py
@@ -0,0 +1,114 @@
+from datetime import datetime
+
+from cg.constants import DataDelivery, SexOptions, Workflow
+from cg.models.orders.sample_base import StatusEnum
+from cg.services.orders.constants import ORDER_TYPE_WORKFLOW_MAP
+from cg.services.orders.lims_service.service import OrderLimsService
+from cg.services.orders.storing.service import StoreOrderService
+from cg.services.orders.validation.workflows.microbial_fastq.models.order import MicrobialFastqOrder
+from cg.services.orders.validation.workflows.microbial_fastq.models.sample import (
+ MicrobialFastqSample,
+)
+from cg.store.models import ApplicationVersion, Case, CaseSample, Customer, Order, Sample
+from cg.store.store import Store
+
+
+class StoreMicrobialFastqOrderService(StoreOrderService):
+
+ def __init__(self, status_db: Store, lims_service: OrderLimsService):
+ self.status_db = status_db
+ self.lims = lims_service
+
+ def store_order(self, order: MicrobialFastqOrder) -> dict:
+ """Store the order in the statusDB and LIMS, return the database samples and LIMS info."""
+ project_data, lims_map = self.lims.process_lims(
+ samples=order.samples,
+ ticket=order._generated_ticket_id,
+ order_name=order.name,
+ workflow=Workflow.RAW_DATA,
+ customer=order.customer,
+ delivery_type=DataDelivery(order.delivery_type),
+ skip_reception_control=order.skip_reception_control,
+ )
+ self._fill_in_sample_ids(samples=order.samples, lims_map=lims_map)
+ new_samples: list[Sample] = self.store_order_data_in_status_db(order=order)
+ return {"records": new_samples, "project": project_data}
+
+ def store_order_data_in_status_db(self, order: MicrobialFastqOrder) -> list[Sample]:
+ """
+ Store all order data in the Status database for a Microbial FASTQ order. Return the samples.
+ The stored data objects are:
+ - Order
+ - Samples
+ - For each Sample, a Case
+ - For each Sample, a relationship between the Sample and its Case
+ """
+ db_order = self._create_db_order(order=order)
+ new_samples = []
+ with self.status_db.session.no_autoflush:
+ for sample in order.samples:
+ case: Case = self._create_db_case_for_sample(
+ sample=sample, customer=db_order.customer, order=order
+ )
+ db_sample: Sample = self._create_db_sample(
+ sample=sample,
+ order_name=order.name,
+ ticket_id=str(db_order.ticket_id),
+ customer=db_order.customer,
+ )
+ case_sample: CaseSample = self.status_db.relate_sample(
+ case=case, sample=db_sample, status=StatusEnum.unknown
+ )
+ self.status_db.add_multiple_items_to_store([case, db_sample, case_sample])
+ db_order.cases.append(case)
+ new_samples.append(db_sample)
+ self.status_db.add_item_to_store(db_order)
+ self.status_db.commit_to_store()
+ return new_samples
+
+ def _create_db_order(self, order: MicrobialFastqOrder) -> Order:
+ """Return an Order database object."""
+ ticket_id: int = order._generated_ticket_id
+ customer: Customer = self.status_db.get_customer_by_internal_id(
+ customer_internal_id=order.customer
+ )
+ return self.status_db.add_order(customer=customer, ticket_id=ticket_id)
+
+ def _create_db_case_for_sample(
+ self, sample: MicrobialFastqSample, customer: Customer, order: MicrobialFastqOrder
+ ) -> Case:
+ """Return a Case database object for a MicrobialFastqSample."""
+ case_name: str = f"{sample.name}-case"
+ case: Case = self.status_db.add_case(
+ data_analysis=ORDER_TYPE_WORKFLOW_MAP[order.order_type],
+ data_delivery=DataDelivery(order.delivery_type),
+ name=case_name,
+ priority=sample.priority,
+ ticket=str(order._generated_ticket_id),
+ )
+ case.customer = customer
+ return case
+
+ def _create_db_sample(
+ self,
+ sample: MicrobialFastqSample,
+ order_name: str,
+ ticket_id: str,
+ customer: Customer,
+ ) -> Sample:
+ """Return a Sample database object."""
+ application_version: ApplicationVersion = (
+ self.status_db.get_current_application_version_by_tag(tag=sample.application)
+ )
+ return self.status_db.add_sample(
+ name=sample.name,
+ customer=customer,
+ application_version=application_version,
+ sex=SexOptions.UNKNOWN,
+ comment=sample.comment,
+ internal_id=sample._generated_lims_id,
+ order=order_name,
+ ordered=datetime.now(),
+ original_ticket=ticket_id,
+ priority=sample.priority,
+ )
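
Here the fan-out goes the other way: each sample gets its own case, named `<sample name>-case`, which is also the convention in the PacBio service below. A small sketch of that per-sample fan-out (hypothetical stub dicts, not the cg models):

```python
def fan_out_cases(sample_names: list[str], ticket_id: int) -> list[dict]:
    """One case per sample, named '<sample>-case' and tied to the order ticket."""
    return [
        {"case_name": f"{name}-case", "sample": name, "ticket": str(ticket_id)}
        for name in sample_names
    ]


if __name__ == "__main__":
    for case in fan_out_cases(["micro-1", "micro-2"], ticket_id=987654):
        print(case)
```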
diff --git a/cg/services/orders/storing/implementations/microbial_order_service.py b/cg/services/orders/storing/implementations/microbial_order_service.py
new file mode 100644
index 0000000000..4922db7146
--- /dev/null
+++ b/cg/services/orders/storing/implementations/microbial_order_service.py
@@ -0,0 +1,148 @@
+import logging
+from datetime import datetime
+
+from cg.constants import DataDelivery, Sex
+from cg.services.orders.constants import ORDER_TYPE_WORKFLOW_MAP
+from cg.services.orders.lims_service.service import OrderLimsService
+from cg.services.orders.storing.service import StoreOrderService
+from cg.services.orders.validation.workflows.microsalt.models.order import MicrosaltOrder
+from cg.services.orders.validation.workflows.microsalt.models.sample import MicrosaltSample
+from cg.services.orders.validation.workflows.mutant.models.order import MutantOrder
+from cg.services.orders.validation.workflows.mutant.models.sample import MutantSample
+from cg.store.models import ApplicationVersion
+from cg.store.models import Case as DbCase
+from cg.store.models import CaseSample, Customer
+from cg.store.models import Order as DbOrder
+from cg.store.models import Organism
+from cg.store.models import Sample as DbSample
+from cg.store.store import Store
+
+LOG = logging.getLogger(__name__)
+
+MicrobialOrder = MicrosaltOrder | MutantOrder
+MicrobialSample = MicrosaltSample | MutantSample
+
+
+class StoreMicrobialOrderService(StoreOrderService):
+ """
+ Storing service for microbial orders.
+ These include:
+ - Mutant samples
+ - Microsalt samples
+ """
+
+ def __init__(self, status_db: Store, lims_service: OrderLimsService):
+ self.status = status_db
+ self.lims = lims_service
+
+ def store_order(self, order: MicrobialOrder) -> dict:
+ self._fill_in_sample_verified_organism(order.samples)
+ project_data, lims_map = self.lims.process_lims(
+ samples=order.samples,
+ customer=order.customer,
+ ticket=order._generated_ticket_id,
+ order_name=order.name,
+ workflow=ORDER_TYPE_WORKFLOW_MAP[order.order_type],
+ delivery_type=DataDelivery(order.delivery_type),
+ skip_reception_control=order.skip_reception_control,
+ )
+ self._fill_in_sample_ids(samples=order.samples, lims_map=lims_map)
+
+ samples = self.store_order_data_in_status_db(order)
+ return {"project": project_data, "records": samples}
+
+ def store_order_data_in_status_db(self, order: MicrobialOrder) -> list[DbSample]:
+ """Store microbial samples in the status database."""
+
+ customer: Customer = self.status.get_customer_by_internal_id(order.customer)
+ new_samples = []
+ db_order: DbOrder = self.status.add_order(
+ customer=customer,
+ ticket_id=order._generated_ticket_id,
+ )
+ db_case: DbCase = self._create_case(customer=customer, order=order)
+
+ with self.status.no_autoflush_context():
+ for sample in order.samples:
+ organism: Organism = self._ensure_organism(sample)
+ db_sample = self._create_db_sample(
+ customer=customer,
+ order_name=order.name,
+ organism=organism,
+ sample=sample,
+ ticket_id=order._generated_ticket_id,
+ )
+ link: CaseSample = self.status.relate_sample(
+ case=db_case, sample=db_sample, status="unknown"
+ )
+ self.status.add_item_to_store(link)
+ new_samples.append(db_sample)
+ db_order.cases.append(db_case)
+
+ self.status.add_item_to_store(db_case)
+ self.status.add_item_to_store(db_order)
+ self.status.add_multiple_items_to_store(new_samples)
+ self.status.commit_to_store()
+ return new_samples
+
+ def _fill_in_sample_verified_organism(self, samples: list[MicrobialSample]):
+ for sample in samples:
+ organism_id = sample.organism
+ reference_genome = sample.reference_genome
+ organism: Organism = self.status.get_organism_by_internal_id(internal_id=organism_id)
+            is_verified: bool = bool(
+ organism and organism.reference_genome == reference_genome and organism.verified
+ )
+ sample._verified_organism = is_verified
+
+ def _create_case(self, customer: Customer, order: MicrobialOrder) -> DbCase:
+ case: DbCase = self.status.add_case(
+ data_analysis=ORDER_TYPE_WORKFLOW_MAP[order.order_type],
+ data_delivery=DataDelivery(order.delivery_type),
+ name=str(order._generated_ticket_id),
+ panels=None,
+ ticket=str(order._generated_ticket_id),
+ priority=order.samples[0].priority,
+ )
+ case.customer = customer
+ return case
+
+ def _ensure_organism(self, sample: MicrobialSample) -> Organism:
+ organism: Organism = self.status.get_organism_by_internal_id(sample.organism)
+ if not organism:
+ organism: Organism = self.status.add_organism(
+ internal_id=sample.organism,
+ name=sample.organism,
+ reference_genome=sample.reference_genome,
+ )
+ self.status.add_item_to_store(organism)
+ self.status.commit_to_store()
+ return organism
+
+ def _create_db_sample(
+ self,
+ customer: Customer,
+ order_name: str,
+ organism: Organism,
+ sample: MicrobialSample,
+ ticket_id: int,
+ ) -> DbSample:
+ application_tag: str = sample.application
+ application_version: ApplicationVersion = (
+ self.status.get_current_application_version_by_tag(tag=application_tag)
+ )
+ return self.status.add_sample(
+ name=sample.name,
+ sex=Sex.UNKNOWN,
+ comment=sample.comment,
+ control=sample.control,
+ internal_id=sample._generated_lims_id,
+ order=order_name,
+ ordered=datetime.now(),
+ original_ticket=str(ticket_id),
+ priority=sample.priority,
+ application_version=application_version,
+ customer=customer,
+ organism=organism,
+ reference_genome=sample.reference_genome,
+ )
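
`_ensure_organism` is a get-or-create: look the organism up by internal id and only insert (and commit) when it is missing. A self-contained sketch of the same pattern over an in-memory store (the dict stands in for StatusDB):

```python
class InMemoryOrganismStore:
    """Toy stand-in for the Store methods used by _ensure_organism."""

    def __init__(self) -> None:
        self._organisms: dict[str, dict] = {}

    def get_organism_by_internal_id(self, internal_id: str) -> dict | None:
        return self._organisms.get(internal_id)

    def ensure_organism(self, internal_id: str, reference_genome: str) -> dict:
        organism = self.get_organism_by_internal_id(internal_id)
        if not organism:
            # Only create (and "commit") when the organism is missing,
            # mirroring the add_organism + commit_to_store branch above
            organism = {
                "internal_id": internal_id,
                "name": internal_id,
                "reference_genome": reference_genome,
            }
            self._organisms[internal_id] = organism
        return organism


if __name__ == "__main__":
    store = InMemoryOrganismStore()
    first = store.ensure_organism("C. jejuni", reference_genome="NC_002163.1")
    second = store.ensure_organism("C. jejuni", reference_genome="NC_002163.1")
    print(first is second)  # True: the second call hit the existing entry
```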
diff --git a/cg/services/orders/storing/implementations/pacbio_order_service.py b/cg/services/orders/storing/implementations/pacbio_order_service.py
new file mode 100644
index 0000000000..536a03b124
--- /dev/null
+++ b/cg/services/orders/storing/implementations/pacbio_order_service.py
@@ -0,0 +1,116 @@
+import logging
+from datetime import datetime
+
+from cg.constants import DataDelivery, Workflow
+from cg.models.orders.sample_base import StatusEnum
+from cg.services.orders.constants import ORDER_TYPE_WORKFLOW_MAP
+from cg.services.orders.lims_service.service import OrderLimsService
+from cg.services.orders.storing.service import StoreOrderService
+from cg.services.orders.validation.workflows.pacbio_long_read.models.order import PacbioOrder
+from cg.services.orders.validation.workflows.pacbio_long_read.models.sample import PacbioSample
+from cg.store.models import ApplicationVersion, Case, CaseSample, Customer, Order, Sample
+from cg.store.store import Store
+
+LOG = logging.getLogger(__name__)
+
+
+class StorePacBioOrderService(StoreOrderService):
+ """Storing service for PacBio Long Read orders."""
+
+ def __init__(self, status_db: Store, lims_service: OrderLimsService):
+ self.status_db = status_db
+ self.lims = lims_service
+
+ def store_order(self, order: PacbioOrder) -> dict:
+ """Store the order in the statusDB and LIMS, return the database samples and LIMS info."""
+ project_data, lims_map = self.lims.process_lims(
+ samples=order.samples,
+ ticket=order._generated_ticket_id,
+ order_name=order.name,
+ workflow=Workflow.RAW_DATA,
+ customer=order.customer,
+ delivery_type=DataDelivery(order.delivery_type),
+ skip_reception_control=order.skip_reception_control,
+ )
+ self._fill_in_sample_ids(samples=order.samples, lims_map=lims_map)
+ new_samples = self.store_order_data_in_status_db(order=order)
+ return {"project": project_data, "records": new_samples}
+
+ def store_order_data_in_status_db(self, order: PacbioOrder) -> list[Sample]:
+ """
+        Store all order data in the Status database for a PacBio order. Return the samples.
+ The stored data objects are:
+ - Order
+ - Samples
+ - For each Sample, a Case
+ - For each Sample, a relationship between the Sample and its Case
+ """
+ status_db_order: Order = self._create_db_order(order=order)
+ new_samples = []
+ with self.status_db.no_autoflush_context():
+ for sample in order.samples:
+ case: Case = self._create_db_case_for_sample(
+ sample=sample,
+ customer=status_db_order.customer,
+ order=order,
+ )
+ db_sample: Sample = self._create_db_sample(
+ sample=sample,
+ order_name=order.name,
+ customer=status_db_order.customer,
+ ticket_id=str(status_db_order.ticket_id),
+ )
+ case_sample: CaseSample = self.status_db.relate_sample(
+ case=case, sample=db_sample, status=StatusEnum.unknown
+ )
+ self.status_db.add_multiple_items_to_store([case, case_sample, db_sample])
+ status_db_order.cases.append(case)
+ new_samples.append(db_sample)
+ self.status_db.add_item_to_store(status_db_order)
+ self.status_db.commit_to_store()
+ return new_samples
+
+ def _create_db_order(self, order: PacbioOrder) -> Order:
+ """Return an Order database object."""
+ ticket_id: int = order._generated_ticket_id
+ customer: Customer = self.status_db.get_customer_by_internal_id(
+ customer_internal_id=order.customer
+ )
+ return self.status_db.add_order(customer=customer, ticket_id=ticket_id)
+
+ def _create_db_case_for_sample(
+ self, sample: PacbioSample, customer: Customer, order: PacbioOrder
+ ) -> Case:
+ """Return a Case database object for a PacbioSample."""
+ case_name: str = f"{sample.name}-case"
+ case: Case = self.status_db.add_case(
+ data_analysis=ORDER_TYPE_WORKFLOW_MAP[order.order_type],
+ data_delivery=DataDelivery(order.delivery_type),
+ name=case_name,
+ priority=sample.priority,
+ ticket=str(order._generated_ticket_id),
+ )
+ case.customer = customer
+ return case
+
+ def _create_db_sample(
+ self, sample: PacbioSample, order_name: str, customer: Customer, ticket_id: str
+ ) -> Sample:
+ """Return a Sample database object."""
+ application_version: ApplicationVersion = (
+ self.status_db.get_current_application_version_by_tag(tag=sample.application)
+ )
+ return self.status_db.add_sample(
+ name=sample.name,
+ customer=customer,
+ application_version=application_version,
+ sex=sample.sex,
+ comment=sample.comment,
+ internal_id=sample._generated_lims_id,
+ order=order_name,
+ ordered=datetime.now(),
+ original_ticket=ticket_id,
+ priority=sample.priority,
+ tumour=sample.tumour,
+ subject_id=sample.subject_id,
+ )
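
All of these services share the same `store_order` shape: push the samples to LIMS, copy the generated LIMS ids back onto the samples, persist everything to StatusDB, and return a `{"project": ..., "records": ...}` dict. A condensed sketch of that flow with stubbed-out collaborators (the names here are illustrative, not the real cg API):

```python
from datetime import datetime


def fake_process_lims(samples: list[dict]) -> tuple[dict, dict[str, str]]:
    """Stand-in for OrderLimsService.process_lims: returns project data and a name->id map."""
    lims_map = {sample["name"]: f"ACC{i:04d}" for i, sample in enumerate(samples, start=1)}
    return {"date": datetime.now()}, lims_map


def fill_in_sample_ids(samples: list[dict], lims_map: dict[str, str]) -> None:
    """Mirror of _fill_in_sample_ids: attach the LIMS-generated internal id to each sample."""
    for sample in samples:
        sample["internal_id"] = lims_map[sample["name"]]


def store_order(samples: list[dict]) -> dict:
    project_data, lims_map = fake_process_lims(samples)
    fill_in_sample_ids(samples, lims_map)
    records = [sample["internal_id"] for sample in samples]  # stand-in for DB persistence
    return {"project": project_data, "records": records}


if __name__ == "__main__":
    result = store_order([{"name": "pacbio-1"}, {"name": "pacbio-2"}])
    print(result["records"])  # ['ACC0001', 'ACC0002']
```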
diff --git a/cg/services/orders/storing/implementations/pool_order_service.py b/cg/services/orders/storing/implementations/pool_order_service.py
new file mode 100644
index 0000000000..dd6b59f8d5
--- /dev/null
+++ b/cg/services/orders/storing/implementations/pool_order_service.py
@@ -0,0 +1,173 @@
+import logging
+from datetime import datetime
+
+from cg.models.orders.sample_base import PriorityEnum, SexEnum, StatusEnum
+from cg.services.orders.constants import ORDER_TYPE_WORKFLOW_MAP
+from cg.services.orders.lims_service.service import OrderLimsService
+from cg.services.orders.storing.service import StoreOrderService
+from cg.services.orders.validation.models.order_aliases import OrderWithIndexedSamples
+from cg.services.orders.validation.models.sample_aliases import IndexedSample
+from cg.store.models import ApplicationVersion, Case, CaseSample, Customer, Order, Pool, Sample
+from cg.store.store import Store
+
+LOG = logging.getLogger(__name__)
+
+
+class StorePoolOrderService(StoreOrderService):
+ """
+    Storing service for pool orders in StatusDB and LIMS.
+    These include:
+ - Fluffy / NIPT samples
+ - RML samples
+ """
+
+ def __init__(self, status_db: Store, lims_service: OrderLimsService):
+ self.status_db = status_db
+ self.lims = lims_service
+
+ def store_order(self, order: OrderWithIndexedSamples) -> dict:
+ project_data, lims_map = self.lims.process_lims(
+ samples=order.samples,
+ customer=order.customer,
+ ticket=order._generated_ticket_id,
+ order_name=order.name,
+ workflow=ORDER_TYPE_WORKFLOW_MAP[order.order_type],
+ delivery_type=order.delivery_type,
+ skip_reception_control=order.skip_reception_control,
+ )
+ self._fill_in_sample_ids(samples=order.samples, lims_map=lims_map)
+ new_records: list[Pool] = self.store_order_data_in_status_db(order=order)
+ return {"project": project_data, "records": new_records}
+
+ def store_order_data_in_status_db(self, order: OrderWithIndexedSamples) -> list[Pool]:
+ """Store pools in the status database."""
+ db_order: Order = self._create_db_order(order=order)
+ new_pools: list[Pool] = []
+ with self.status_db.no_autoflush_context():
+ for pool in order.pools.items():
+ db_case: Case = self._create_db_case_for_pool(
+ order=order,
+ pool=pool,
+ customer=db_order.customer,
+ ticket_id=str(db_order.ticket_id),
+ )
+ db_pool: Pool = self._create_db_pool(
+ pool=pool,
+ order_name=order.name,
+ ticket_id=str(db_order.ticket_id),
+ customer=db_order.customer,
+ )
+ for sample in pool[1]:
+ db_sample: Sample = self._create_db_sample(
+ sample=sample,
+ order_name=order.name,
+ ticket_id=str(db_order.ticket_id),
+ customer=db_order.customer,
+ application_version=db_pool.application_version,
+ )
+ case_sample: CaseSample = self.status_db.relate_sample(
+ case=db_case, sample=db_sample, status=StatusEnum.unknown
+ )
+ self.status_db.add_multiple_items_to_store([db_sample, case_sample])
+ new_pools.append(db_pool)
+ db_order.cases.append(db_case)
+ self.status_db.add_multiple_items_to_store([db_pool, db_case])
+ self.status_db.add_item_to_store(db_order)
+ self.status_db.commit_to_store()
+ return new_pools
+
+ @staticmethod
+ def create_case_name(ticket: str, pool_name: str) -> str:
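+        # e.g. create_case_name(ticket="123456", pool_name="pool-1") == "123456-pool-1"
+        # (ticket and pool name values are illustrative)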
+ return f"{ticket}-{pool_name}"
+
+ def _get_application_version_from_pool_samples(
+ self, pool_samples: list[IndexedSample]
+ ) -> ApplicationVersion:
+ """
+ Return the application version for a pool by taking the app tag of the first sample of
+ the pool. The validation guarantees that all samples in a pool have the same application.
+ """
+ app_tag: str = pool_samples[0].application
+ application_version: ApplicationVersion = (
+ self.status_db.get_current_application_version_by_tag(tag=app_tag)
+ )
+ return application_version
+
+ @staticmethod
+ def _get_priority_from_pool_samples(pool_samples: list[IndexedSample]) -> PriorityEnum:
+ """
+ Return the priority of the pool by taking the priority of the first sample of the pool.
+ The validation guarantees that all samples in a pool have the same priority.
+ """
+ return pool_samples[0].priority
+
+ def _create_db_order(self, order: OrderWithIndexedSamples) -> Order:
+ """Return an Order database object."""
+ ticket_id: int = order._generated_ticket_id
+ customer: Customer = self.status_db.get_customer_by_internal_id(
+ customer_internal_id=order.customer
+ )
+ return self.status_db.add_order(customer=customer, ticket_id=ticket_id)
+
+ def _create_db_case_for_pool(
+ self,
+ order: OrderWithIndexedSamples,
+ pool: tuple[str, list[IndexedSample]],
+ customer: Customer,
+ ticket_id: str,
+ ) -> Case:
+ """Return a Case database object for a pool."""
+ case_name: str = self.create_case_name(ticket=ticket_id, pool_name=pool[0])
+ case = self.status_db.add_case(
+ data_analysis=ORDER_TYPE_WORKFLOW_MAP[order.order_type],
+ data_delivery=order.delivery_type,
+ name=case_name,
+ priority=self._get_priority_from_pool_samples(pool_samples=pool[1]),
+ ticket=ticket_id,
+ )
+ case.customer = customer
+ return case
+
+ def _create_db_pool(
+ self,
+ pool: tuple[str, list[IndexedSample]],
+ order_name: str,
+ ticket_id: str,
+ customer: Customer,
+ ) -> Pool:
+ """Return a Pool database object."""
+ application_version: ApplicationVersion = self._get_application_version_from_pool_samples(
+ pool_samples=pool[1]
+ )
+ return self.status_db.add_pool(
+ application_version=application_version,
+ customer=customer,
+ name=pool[0],
+ order=order_name,
+ ordered=datetime.now(),
+ ticket=ticket_id,
+ )
+
+ def _create_db_sample(
+ self,
+ sample: IndexedSample,
+ order_name: str,
+ ticket_id: str,
+ customer: Customer,
+ application_version: ApplicationVersion,
+ ) -> Sample:
+ """Return a Sample database object."""
+ return self.status_db.add_sample(
+ name=sample.name,
+ customer=customer,
+ application_version=application_version,
+ sex=SexEnum.unknown,
+ comment=sample.comment,
+ control=sample.control,
+ internal_id=sample._generated_lims_id,
+ order=order_name,
+ ordered=datetime.now(),
+ original_ticket=ticket_id,
+ priority=sample.priority,
+ no_invoice=True,
+ )
diff --git a/cg/services/orders/storing/service.py b/cg/services/orders/storing/service.py
new file mode 100644
index 0000000000..4c8b4fe6f5
--- /dev/null
+++ b/cg/services/orders/storing/service.py
@@ -0,0 +1,31 @@
+"""Abstract base classes for order submitters."""
+
+import logging
+from abc import ABC, abstractmethod
+
+from cg.services.orders.lims_service.service import OrderLimsService
+from cg.services.orders.validation.models.order import Order
+from cg.services.orders.validation.models.sample import Sample
+from cg.store.store import Store
+
+LOG = logging.getLogger(__name__)
+
+
+class StoreOrderService(ABC):
+ @abstractmethod
+ def __init__(self, status_db: Store, lims_service: OrderLimsService):
+ self.status_db = status_db
+ self.lims = lims_service
+
+ @abstractmethod
+ def store_order(self, order: Order):
+ pass
+
+ @staticmethod
+ def _fill_in_sample_ids(samples: list[Sample], lims_map: dict) -> None:
+ """Fill in LIMS sample ids."""
+ for sample in samples:
+ LOG.debug(f"{sample.name}: link sample to LIMS")
+ internal_id = lims_map[sample.name]
+ LOG.info(f"{sample.name} -> {internal_id}: connect sample to LIMS")
+ sample._generated_lims_id = internal_id
diff --git a/cg/services/orders/storing/service_registry.py b/cg/services/orders/storing/service_registry.py
new file mode 100644
index 0000000000..a0b56ae11b
--- /dev/null
+++ b/cg/services/orders/storing/service_registry.py
@@ -0,0 +1,126 @@
+from cg.apps.lims import LimsAPI
+from cg.models.orders.constants import OrderType
+from cg.services.orders.lims_service.service import OrderLimsService
+from cg.services.orders.storing.implementations.case_order_service import StoreCaseOrderService
+from cg.services.orders.storing.implementations.fastq_order_service import StoreFastqOrderService
+from cg.services.orders.storing.implementations.metagenome_order_service import (
+ StoreMetagenomeOrderService,
+)
+from cg.services.orders.storing.implementations.microbial_fastq_order_service import (
+ StoreMicrobialFastqOrderService,
+)
+from cg.services.orders.storing.implementations.microbial_order_service import (
+ StoreMicrobialOrderService,
+)
+from cg.services.orders.storing.implementations.pacbio_order_service import StorePacBioOrderService
+from cg.services.orders.storing.implementations.pool_order_service import StorePoolOrderService
+from cg.services.orders.storing.service import StoreOrderService
+from cg.store.store import Store
+
+
+class StoringServiceRegistry:
+ """
+ A registry for StoreOrderService instances, keyed by OrderType.
+ """
+
+ def __init__(self):
+ self._registry = {}
+
+ def register(self, order_type: OrderType, storing_service: StoreOrderService):
+ """Register a StoreOrderService instance for a given OrderType."""
+ self._registry[order_type] = storing_service
+
+ def get_storing_service(self, order_type: OrderType) -> StoreOrderService:
+ """Fetch the registered StoreOrderService for the given OrderType."""
+ if storing_service := self._registry.get(order_type):
+ return storing_service
+ raise ValueError(f"No StoreOrderService registered for order type: {order_type}")
+
+
+order_service_mapping = {
+ OrderType.BALSAMIC: (
+ OrderLimsService,
+ StoreCaseOrderService,
+ ),
+ OrderType.BALSAMIC_QC: (
+ OrderLimsService,
+ StoreCaseOrderService,
+ ),
+ OrderType.BALSAMIC_UMI: (
+ OrderLimsService,
+ StoreCaseOrderService,
+ ),
+ OrderType.FASTQ: (
+ OrderLimsService,
+ StoreFastqOrderService,
+ ),
+ OrderType.FLUFFY: (
+ OrderLimsService,
+ StorePoolOrderService,
+ ),
+ OrderType.METAGENOME: (
+ OrderLimsService,
+ StoreMetagenomeOrderService,
+ ),
+ OrderType.MICROBIAL_FASTQ: (
+ OrderLimsService,
+ StoreMicrobialFastqOrderService,
+ ),
+ OrderType.MICROSALT: (
+ OrderLimsService,
+ StoreMicrobialOrderService,
+ ),
+ OrderType.MIP_DNA: (
+ OrderLimsService,
+ StoreCaseOrderService,
+ ),
+ OrderType.MIP_RNA: (
+ OrderLimsService,
+ StoreCaseOrderService,
+ ),
+ OrderType.PACBIO_LONG_READ: (
+ OrderLimsService,
+ StorePacBioOrderService,
+ ),
+ OrderType.RML: (
+ OrderLimsService,
+ StorePoolOrderService,
+ ),
+ OrderType.RNAFUSION: (
+ OrderLimsService,
+ StoreCaseOrderService,
+ ),
+ OrderType.SARS_COV_2: (
+ OrderLimsService,
+ StoreMicrobialOrderService,
+ ),
+ OrderType.TAXPROFILER: (
+ OrderLimsService,
+ StoreMetagenomeOrderService,
+ ),
+ OrderType.TOMTE: (
+ OrderLimsService,
+ StoreCaseOrderService,
+ ),
+}
+
+
+def build_storing_service(
+ lims: LimsAPI, status_db: Store, order_type: OrderType
+) -> StoreOrderService:
+ """Build a StoreOrderService instance for the given OrderType."""
+ lims_service, store_service = order_service_mapping[order_type]
+ return store_service(status_db, lims_service(lims))
+
+
+def setup_storing_service_registry(lims: LimsAPI, status_db: Store) -> StoringServiceRegistry:
+ """Set up the StoringServiceRegistry with all StoreOrderService instances."""
+ registry = StoringServiceRegistry()
+ for order_type in order_service_mapping.keys():
+ registry.register(
+ order_type=order_type,
+ storing_service=build_storing_service(
+ lims=lims, status_db=status_db, order_type=order_type
+ ),
+ )
+ return registry
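+
+
+# Illustrative wiring (a sketch; assumes a configured LimsAPI and Store):
+#
+#   registry = setup_storing_service_registry(lims=lims_api, status_db=status_db)
+#   storing_service = registry.get_storing_service(OrderType.RML)
+#   result = storing_service.store_order(order)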
diff --git a/cg/services/orders/submitter/service.py b/cg/services/orders/submitter/service.py
new file mode 100644
index 0000000000..9b0daf6e65
--- /dev/null
+++ b/cg/services/orders/submitter/service.py
@@ -0,0 +1,46 @@
+"""Unified interface to handle sample submissions.
+
+This service will update information in Status and/or LIMS as required.
+
+The normal entry point for information is the REST API, which passes a JSON
+document with all information about the samples in the submission. The input is
+validated and, if it passes all checks, accepted as new samples.
+"""
+
+from cg.models.orders.constants import OrderType
+from cg.services.orders.storing.service import StoreOrderService
+from cg.services.orders.storing.service_registry import StoringServiceRegistry
+from cg.services.orders.submitter.ticket_handler import TicketHandler
+from cg.services.orders.validation.models.order import Order
+from cg.services.orders.validation.service import OrderValidationService
+from cg.store.models import User
+
+
+class OrderSubmitter:
+ """Orders API for accepting new samples into the system."""
+
+ def __init__(
+ self,
+ ticket_handler: TicketHandler,
+ storing_registry: StoringServiceRegistry,
+ validation_service: OrderValidationService,
+ ):
+ super().__init__()
+ self.ticket_handler = ticket_handler
+ self.storing_registry = storing_registry
+ self.validation_service = validation_service
+
+ def submit(self, order_type: OrderType, raw_order: dict, user: User) -> dict:
+ """Submit a batch of samples.
+
+        Main entry point for the interfaces that use this class.
+ """
+ storing_service: StoreOrderService = self.storing_registry.get_storing_service(order_type)
+ order: Order = self.validation_service.parse_and_validate(
+ raw_order=raw_order, order_type=order_type, user_id=user.id
+ )
+ ticket_number: int = self.ticket_handler.create_ticket(
+ order=order, user_name=user.name, user_mail=user.email, order_type=order_type
+ )
+ order._generated_ticket_id = ticket_number
+ return storing_service.store_order(order)
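+
+
+# Illustrative call path (a sketch; `raw_order` is the JSON document received by
+# the REST API and `user` a logged-in User; names are examples):
+#
+#   submitter = OrderSubmitter(ticket_handler, storing_registry, validation_service)
+#   result = submitter.submit(order_type=OrderType.FASTQ, raw_order=raw_order, user=user)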
diff --git a/cg/meta/orders/ticket_handler.py b/cg/services/orders/submitter/ticket_handler.py
similarity index 56%
rename from cg/meta/orders/ticket_handler.py
rename to cg/services/orders/submitter/ticket_handler.py
index 91d806fdfa..54ff979156 100644
--- a/cg/meta/orders/ticket_handler.py
+++ b/cg/services/orders/submitter/ticket_handler.py
@@ -1,13 +1,15 @@
import logging
-import re
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Any
from cg.clients.freshdesk.freshdesk_client import FreshdeskClient
-from cg.clients.freshdesk.models import ReplyCreate, TicketCreate, TicketResponse
-from cg.models.orders.order import OrderIn
-from cg.models.orders.samples import Of1508Sample
+from cg.clients.freshdesk.models import TicketCreate, TicketResponse
+from cg.models.orders.constants import OrderType
+from cg.services.orders.constants import ORDER_TYPE_WORKFLOW_MAP
+from cg.services.orders.validation.models.order import Order
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
from cg.store.models import Customer, Sample
from cg.store.store import Store
@@ -25,26 +27,14 @@ def __init__(self, db: Store, client: FreshdeskClient, system_email_id: int, env
self.system_email_id: int = system_email_id
self.env: str = env
- @staticmethod
- def parse_ticket_number(name: str) -> str | None:
- """Try to parse a ticket number from a string"""
- # detect manual ticket assignment
- ticket_match = re.fullmatch(r"#(\d{6,10})", name)
- if ticket_match:
- ticket_id = ticket_match.group(1)
- LOG.info(f"{ticket_id}: detected ticket in order name")
- return ticket_id
- LOG.info(f"Could not detected ticket number in name {name}")
- return None
-
def create_ticket(
- self, order: OrderIn, user_name: str, user_mail: str, project: str
+ self, order: Order, user_name: str, user_mail: str, order_type: OrderType
) -> int | None:
"""Create a ticket and return the ticket number"""
message: str = self.create_new_ticket_header(
message=self.create_xml_sample_list(order=order, user_name=user_name),
order=order,
- project=project,
+ order_type=order_type,
)
with TemporaryDirectory() as temp_dir:
@@ -57,7 +47,7 @@ def create_ticket(
name=user_name,
subject=order.name,
type="Order",
- tags=[order.samples[0].data_analysis],
+ tags=[ORDER_TYPE_WORKFLOW_MAP[order_type]],
custom_fields={
"cf_environment": self.env,
},
@@ -70,29 +60,29 @@ def create_ticket(
return ticket_response.id
- def create_attachment_file(self, order: OrderIn, temp_dir: str) -> Path:
+ def create_attachment_file(self, order: Order, temp_dir: str) -> Path:
"""Create a single attachment file for the ticket"""
order_file_path = Path(temp_dir) / "order.json"
with order_file_path.open("w") as order_file:
order_file.write(order.json())
return order_file_path
- def create_xml_sample_list(self, order: OrderIn, user_name: str) -> str:
+ def create_xml_sample_list(self, order: Order, user_name: str) -> str:
message = ""
- for sample in order.samples:
- message = self.add_sample_name_to_message(message=message, sample_name=sample.name)
- message = self.add_sample_apptag_to_message(
- message=message, application=sample.application
- )
- if isinstance(sample, Of1508Sample):
- message = self.add_sample_case_name_to_message(
- message=message, case_name=sample.family_name
+ if isinstance(order, OrderWithCases):
+ message = self.create_case_xml_sample_list(order=order, message=message)
+ else:
+ for sample in order.samples:
+ message = self.add_sample_name_to_message(message=message, sample_name=sample.name)
+ message = self.add_sample_apptag_to_message(
+ message=message, application=sample.application
)
- message = self.add_existing_sample_info_to_message(
- message=message, customer_id=order.customer, internal_id=sample.internal_id
+ message = self.add_sample_priority_to_message(
+ message=message, priority=sample.priority
+ )
+ message = self.add_sample_comment_to_message(
+ message=message, comment=sample.comment
)
- message = self.add_sample_priority_to_message(message=message, priority=sample.priority)
- message = self.add_sample_comment_to_message(message=message, comment=sample.comment)
message += self.NEW_LINE
message = self.add_order_comment_to_message(message=message, comment=order.comment)
@@ -102,15 +92,13 @@ def create_xml_sample_list(self, order: OrderIn, user_name: str) -> str:
return message
@staticmethod
- def create_new_ticket_header(message: str, order: OrderIn, project: str) -> str:
- return f"New order with {len(order.samples)} {project} samples:" + message
-
- @staticmethod
- def add_existing_ticket_header(message: str, order: OrderIn, project: str) -> str:
- return (
- f"A new order with {len(order.samples)} {project} samples has been connected to this ticket:"
- + message
+ def create_new_ticket_header(message: str, order: Order, order_type: OrderType) -> str:
+ nr_samples = (
+ len(order.samples)
+ if isinstance(order, OrderWithSamples)
+ else len(order.enumerated_new_samples)
)
+ return f"New order with {nr_samples} new {order_type} samples:" + message
def add_sample_name_to_message(self, message: str, sample_name: str) -> str:
message += f"{self.NEW_LINE}{sample_name}"
@@ -129,10 +117,8 @@ def add_sample_case_name_to_message(message: str, case_name: str | None) -> str:
return message
def add_existing_sample_info_to_message(
- self, message: str, customer_id: str, internal_id: str | None
+ self, message: str, customer_id: str, internal_id: str, case_name: str
) -> str:
- if not internal_id:
- return message
existing_sample: Sample = self.status_db.get_sample_by_internal_id(internal_id=internal_id)
@@ -140,7 +126,7 @@ def add_existing_sample_info_to_message(
if existing_sample.customer_id != customer_id:
sample_customer = " from " + existing_sample.customer.internal_id
- message += f" (already existing sample{sample_customer})"
+ message += f"{existing_sample.name}, application: {existing_sample.application_version.application.tag}, case: {case_name} (already existing sample{sample_customer}), priority: {existing_sample.priority}"
return message
@staticmethod
@@ -189,26 +175,40 @@ def replace_empty_string_with_none(cls, obj: Any) -> Any:
obj[key] = cls.replace_empty_string_with_none(item)
return obj
- def connect_to_ticket(
- self, order: OrderIn, user_name: str, project: str, ticket_number: str
- ) -> None:
- """Appends a new order message to the ticket selected by the customer"""
- LOG.info(f"Connecting order to ticket {ticket_number}")
-
- message: str = self.add_existing_ticket_header(
- message=self.create_xml_sample_list(order=order, user_name=user_name),
- order=order,
- project=project,
- )
-
- with TemporaryDirectory() as temp_dir:
- attachments: Path = self.create_attachment_file(order=order, temp_dir=temp_dir)
-
- reply = ReplyCreate(ticket_number=ticket_number, body=message)
-
- self.client.reply_to_ticket(
- reply=reply,
- attachments=[attachments],
- )
-
- LOG.info(f"Connected order to ticket {ticket_number} in Freshdesk")
+    def create_case_xml_sample_list(self, order: OrderWithCases, message: str) -> str:
+ for case in order.cases:
+ if not case.is_new:
+ db_case = self.status_db.get_case_by_internal_id(case.internal_id)
+ for sample in db_case.samples:
+ message = self.add_existing_sample_info_to_message(
+ message=message,
+ customer_id=sample.customer.internal_id,
+ internal_id=sample.internal_id,
+ case_name=db_case.name,
+ )
+ else:
+ for sample in case.samples:
+ if not sample.is_new:
+ message = self.add_existing_sample_info_to_message(
+ message=message,
+ customer_id=order.customer,
+ internal_id=sample.internal_id,
+ case_name=case.name,
+ )
+ else:
+ message = self.add_sample_name_to_message(
+ message=message, sample_name=sample.name
+ )
+ message = self.add_sample_apptag_to_message(
+ message=message, application=sample.application
+ )
+ message = self.add_sample_case_name_to_message(
+ message=message, case_name=case.name
+ )
+ message = self.add_sample_priority_to_message(
+ message=message, priority=case.priority
+ )
+ message = self.add_sample_comment_to_message(
+ message=message, comment=sample.comment
+ )
+ return message
diff --git a/cg/services/orders/submitters/case_order_submitter.py b/cg/services/orders/submitters/case_order_submitter.py
deleted file mode 100644
index d493965a47..0000000000
--- a/cg/services/orders/submitters/case_order_submitter.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""Module for a generic order submitter."""
-
-from cg.models.orders.order import OrderIn
-from cg.services.orders.store_order_services.store_case_order import StoreCaseOrderService
-from cg.services.orders.submitters.order_submitter import OrderSubmitter
-from cg.services.orders.validate_order_services.validate_case_order import (
- ValidateCaseOrderService,
-)
-
-
-class CaseOrderSubmitter(OrderSubmitter):
- """
- Class for submitting generic orders.
- This class is used to submit orders for the following workflows:
- - Balsamic
- - Balsamic QC
- - Balsamic UMI
- - MIP DNA
- - MIP RNA
- - Tomte
- """
-
- def __init__(
- self,
- order_validation_service: ValidateCaseOrderService,
- order_store_service: StoreCaseOrderService,
- ):
- self.order_validation_service = order_validation_service
- self.order_store_service = order_store_service
-
- def submit_order(self, order_in: OrderIn) -> dict:
- """Submit a generic order."""
- self.order_validation_service.validate_order(order_in)
- return self.order_store_service.store_order(order_in)
diff --git a/cg/services/orders/submitters/fastq_order_submitter.py b/cg/services/orders/submitters/fastq_order_submitter.py
deleted file mode 100644
index 58025d939b..0000000000
--- a/cg/services/orders/submitters/fastq_order_submitter.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from cg.models.orders.order import OrderIn
-from cg.services.orders.store_order_services.store_fastq_order_service import StoreFastqOrderService
-from cg.services.orders.submitters.order_submitter import OrderSubmitter
-from cg.services.orders.validate_order_services.validate_fastq_order import (
- ValidateFastqOrderService,
-)
-
-
-class FastqOrderSubmitter(OrderSubmitter):
- """Submitter for fastq orders."""
-
- def __init__(
- self,
- order_validation_service: ValidateFastqOrderService,
- order_store_service: StoreFastqOrderService,
- ):
- self.order_validation_service = order_validation_service
- self.order_store_service = order_store_service
-
- def submit_order(self, order_in: OrderIn) -> dict:
- """Submit a fastq order."""
- self.order_validation_service.validate_order(order_in)
- result: dict = self.order_store_service.store_order(order_in)
- return result
diff --git a/cg/services/orders/submitters/metagenome_order_submitter.py b/cg/services/orders/submitters/metagenome_order_submitter.py
deleted file mode 100644
index d7ee19611b..0000000000
--- a/cg/services/orders/submitters/metagenome_order_submitter.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from cg.services.orders.store_order_services.store_metagenome_order import (
- StoreMetagenomeOrderService,
-)
-from cg.services.orders.submitters.order_submitter import OrderSubmitter
-from cg.services.orders.validate_order_services.validate_metagenome_order import (
- ValidateMetagenomeOrderService,
-)
-
-
-class MetagenomeOrderSubmitter(OrderSubmitter):
- """Class for submitting metagenome and taxprofiler orders."""
-
- def __init__(
- self,
- order_validation_service: ValidateMetagenomeOrderService,
- order_store_service: StoreMetagenomeOrderService,
- ):
- self.order_validation_service = order_validation_service
- self.order_store_service = order_store_service
-
- def submit_order(self, order_in) -> dict:
- """Submit a metagenome order."""
- self.order_validation_service.validate_order(order_in)
- return self.order_store_service.store_order(order_in)
diff --git a/cg/services/orders/submitters/microbial_order_submitter.py b/cg/services/orders/submitters/microbial_order_submitter.py
deleted file mode 100644
index 9168630029..0000000000
--- a/cg/services/orders/submitters/microbial_order_submitter.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from cg.services.orders.store_order_services.store_microbial_fastq_order_service import (
- StoreMicrobialFastqOrderService,
-)
-from cg.services.orders.store_order_services.store_microbial_order import StoreMicrobialOrderService
-from cg.services.orders.submitters.order_submitter import OrderSubmitter
-from cg.services.orders.validate_order_services.validate_microbial_order import (
- ValidateMicrobialOrderService,
-)
-
-
-class MicrobialOrderSubmitter(OrderSubmitter):
- """
- Class for submitting microbial orders.
- This class is used to submit orders for the following workflows:
- - Sars-Cov-2
- - Microsalt
- - Microbial fastq
- - Mutant
- """
-
- def __init__(
- self,
- order_validation_service: ValidateMicrobialOrderService,
- order_store_service: StoreMicrobialOrderService | StoreMicrobialFastqOrderService,
- ):
- self.order_validation_service = order_validation_service
- self.order_store_service = order_store_service
-
- def submit_order(self, order_in) -> dict:
- """Submit a microbial order."""
- self.order_validation_service.validate_order(order_in)
- return self.order_store_service.store_order(order_in)
diff --git a/cg/services/orders/submitters/order_submitter.py b/cg/services/orders/submitters/order_submitter.py
deleted file mode 100644
index 1b25cdf807..0000000000
--- a/cg/services/orders/submitters/order_submitter.py
+++ /dev/null
@@ -1,56 +0,0 @@
-"""Abstract base classes for order submitters."""
-
-import logging
-from abc import ABC, abstractmethod
-
-from cg.models.orders.order import OrderIn
-from cg.services.orders.order_lims_service.order_lims_service import OrderLimsService
-from cg.store.store import Store
-
-LOG = logging.getLogger(__name__)
-
-
-class ValidateOrderService(ABC):
- @abstractmethod
- def __init__(self, status_db: Store):
- self.status_db = status_db
-
- @abstractmethod
- def validate_order(self, order_in: OrderIn):
- pass
-
-
-class StoreOrderService(ABC):
- @abstractmethod
- def __init__(self, status_db: Store, lims_service: OrderLimsService):
- self.status_db = status_db
- self.lims = lims_service
-
- @abstractmethod
- def store_order(self, order_in: OrderIn):
- pass
-
- @staticmethod
- def _fill_in_sample_ids(samples: list[dict], lims_map: dict, id_key: str = "internal_id"):
- """Fill in LIMS sample ids."""
- for sample in samples:
- LOG.debug(f"{sample['name']}: link sample to LIMS")
- if not sample.get(id_key):
- internal_id = lims_map[sample["name"]]
- LOG.info(f"{sample['name']} -> {internal_id}: connect sample to LIMS")
- sample[id_key] = internal_id
-
-
-class OrderSubmitter(ABC):
- @abstractmethod
- def __init__(
- self,
- validate_order_service: ValidateOrderService,
- store_order_service: StoreOrderService,
- ):
- self.order_validation_service = validate_order_service
- self.order_store_service = store_order_service
-
- @abstractmethod
- def submit_order(self, order_in: OrderIn) -> dict:
- pass
diff --git a/cg/services/orders/submitters/order_submitter_registry.py b/cg/services/orders/submitters/order_submitter_registry.py
deleted file mode 100644
index 2267d5cd94..0000000000
--- a/cg/services/orders/submitters/order_submitter_registry.py
+++ /dev/null
@@ -1,195 +0,0 @@
-from cg.apps.lims import LimsAPI
-from cg.models.orders.constants import OrderType
-from cg.services.orders.order_lims_service.order_lims_service import OrderLimsService
-from cg.services.orders.store_order_services.store_case_order import (
- StoreCaseOrderService,
-)
-from cg.services.orders.store_order_services.store_fastq_order_service import (
- StoreFastqOrderService,
-)
-from cg.services.orders.store_order_services.store_metagenome_order import (
- StoreMetagenomeOrderService,
-)
-from cg.services.orders.store_order_services.store_microbial_fastq_order_service import (
- StoreMicrobialFastqOrderService,
-)
-from cg.services.orders.store_order_services.store_microbial_order import (
- StoreMicrobialOrderService,
-)
-from cg.services.orders.store_order_services.store_pacbio_order_service import (
- StorePacBioOrderService,
-)
-from cg.services.orders.store_order_services.store_pool_order import (
- StorePoolOrderService,
-)
-from cg.services.orders.submitters.case_order_submitter import CaseOrderSubmitter
-from cg.services.orders.submitters.fastq_order_submitter import FastqOrderSubmitter
-from cg.services.orders.submitters.metagenome_order_submitter import (
- MetagenomeOrderSubmitter,
-)
-from cg.services.orders.submitters.microbial_order_submitter import (
- MicrobialOrderSubmitter,
-)
-from cg.services.orders.submitters.order_submitter import OrderSubmitter
-from cg.services.orders.submitters.pacbio_order_submitter import PacbioOrderSubmitter
-from cg.services.orders.submitters.pool_order_submitter import PoolOrderSubmitter
-from cg.services.orders.validate_order_services.validate_case_order import (
- ValidateCaseOrderService,
-)
-from cg.services.orders.validate_order_services.validate_fastq_order import (
- ValidateFastqOrderService,
-)
-from cg.services.orders.validate_order_services.validate_metagenome_order import (
- ValidateMetagenomeOrderService,
-)
-from cg.services.orders.validate_order_services.validate_microbial_order import (
- ValidateMicrobialOrderService,
-)
-from cg.services.orders.validate_order_services.validate_pacbio_order import (
- ValidatePacbioOrderService,
-)
-from cg.services.orders.validate_order_services.validate_pool_order import (
- ValidatePoolOrderService,
-)
-from cg.store.store import Store
-
-
-class OrderSubmitterRegistry:
- """
- A registry for OrderSubmitter instances, keyed by OrderType.
- """
-
- def __init__(self):
- self._registry = {}
-
- def register(self, order_type: OrderType, order_submitter: OrderSubmitter):
- """Register an OrderSubmitter instance for a given OrderType."""
- self._registry[order_type] = order_submitter
-
- def get_order_submitter(self, order_type: OrderType) -> OrderSubmitter:
- """Fetch the registered OrderSubmitter for the given OrderType."""
- if order_submitter := self._registry.get(order_type):
- return order_submitter
- raise ValueError(f"No OrderSubmitter registered for order type: {order_type}")
-
-
-order_service_mapping = {
- OrderType.BALSAMIC: (
- OrderLimsService,
- ValidateCaseOrderService,
- StoreCaseOrderService,
- CaseOrderSubmitter,
- ),
- OrderType.BALSAMIC_QC: (
- OrderLimsService,
- ValidateCaseOrderService,
- StoreCaseOrderService,
- CaseOrderSubmitter,
- ),
- OrderType.BALSAMIC_UMI: (
- OrderLimsService,
- ValidateCaseOrderService,
- StoreCaseOrderService,
- CaseOrderSubmitter,
- ),
- OrderType.FASTQ: (
- OrderLimsService,
- ValidateFastqOrderService,
- StoreFastqOrderService,
- FastqOrderSubmitter,
- ),
- OrderType.FLUFFY: (
- OrderLimsService,
- ValidatePoolOrderService,
- StorePoolOrderService,
- PoolOrderSubmitter,
- ),
- OrderType.METAGENOME: (
- OrderLimsService,
- ValidateMetagenomeOrderService,
- StoreMetagenomeOrderService,
- MetagenomeOrderSubmitter,
- ),
- OrderType.MICROBIAL_FASTQ: (
- OrderLimsService,
- ValidateMicrobialOrderService,
- StoreMicrobialFastqOrderService,
- MicrobialOrderSubmitter,
- ),
- OrderType.MICROSALT: (
- OrderLimsService,
- ValidateMicrobialOrderService,
- StoreMicrobialOrderService,
- MicrobialOrderSubmitter,
- ),
- OrderType.MIP_DNA: (
- OrderLimsService,
- ValidateCaseOrderService,
- StoreCaseOrderService,
- CaseOrderSubmitter,
- ),
- OrderType.MIP_RNA: (
- OrderLimsService,
- ValidateCaseOrderService,
- StoreCaseOrderService,
- CaseOrderSubmitter,
- ),
- OrderType.PACBIO_LONG_READ: (
- OrderLimsService,
- ValidatePacbioOrderService,
- StorePacBioOrderService,
- PacbioOrderSubmitter,
- ),
- OrderType.RML: (
- OrderLimsService,
- ValidatePoolOrderService,
- StorePoolOrderService,
- PoolOrderSubmitter,
- ),
- OrderType.RNAFUSION: (
- OrderLimsService,
- ValidateCaseOrderService,
- StoreCaseOrderService,
- CaseOrderSubmitter,
- ),
- OrderType.SARS_COV_2: (
- OrderLimsService,
- ValidateMicrobialOrderService,
- StoreMicrobialOrderService,
- MicrobialOrderSubmitter,
- ),
- OrderType.TAXPROFILER: (
- OrderLimsService,
- ValidateMetagenomeOrderService,
- StoreMetagenomeOrderService,
- MetagenomeOrderSubmitter,
- ),
- OrderType.TOMTE: (
- OrderLimsService,
- ValidateCaseOrderService,
- StoreCaseOrderService,
- CaseOrderSubmitter,
- ),
-}
-
-
-def build_submitter(lims: LimsAPI, status_db: Store, order_type: OrderType) -> OrderSubmitter:
- """Build an OrderSubmitter instance for the given OrderType."""
- lims_service, validation_service, store_service, submitter_class = order_service_mapping[
- order_type
- ]
- return submitter_class(
- order_validation_service=validation_service(status_db),
- order_store_service=store_service(status_db, lims_service(lims)),
- )
-
-
-def setup_order_submitter_registry(lims: LimsAPI, status_db: Store) -> OrderSubmitterRegistry:
- """Set up the OrderSubmitterRegistry with all OrderSubmitter instances."""
- registry = OrderSubmitterRegistry()
- for order_type in order_service_mapping.keys():
- registry.register(
- order_type=order_type,
- order_submitter=build_submitter(lims=lims, status_db=status_db, order_type=order_type),
- )
- return registry
diff --git a/cg/services/orders/submitters/pacbio_order_submitter.py b/cg/services/orders/submitters/pacbio_order_submitter.py
deleted file mode 100644
index 03bb274581..0000000000
--- a/cg/services/orders/submitters/pacbio_order_submitter.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from cg.models.orders.order import OrderIn
-from cg.services.orders.store_order_services.store_pacbio_order_service import (
- StorePacBioOrderService,
-)
-from cg.services.orders.submitters.order_submitter import OrderSubmitter
-from cg.services.orders.validate_order_services.validate_pacbio_order import (
- ValidatePacbioOrderService,
-)
-
-
-class PacbioOrderSubmitter(OrderSubmitter):
- """Submitter for Pacbio orders."""
-
- def __init__(
- self,
- order_validation_service: ValidatePacbioOrderService,
- order_store_service: StorePacBioOrderService,
- ):
- self.order_validation_service = order_validation_service
- self.order_store_service = order_store_service
-
- def submit_order(self, order_in: OrderIn) -> dict:
- """Submit a fastq order."""
- self.order_validation_service.validate_order(order_in)
- result: dict = self.order_store_service.store_order(order_in)
- return result
diff --git a/cg/services/orders/submitters/pool_order_submitter.py b/cg/services/orders/submitters/pool_order_submitter.py
deleted file mode 100644
index 6d9b3290ef..0000000000
--- a/cg/services/orders/submitters/pool_order_submitter.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from cg.models.orders.order import OrderIn
-from cg.services.orders.store_order_services.store_pool_order import StorePoolOrderService
-from cg.services.orders.submitters.order_submitter import OrderSubmitter
-from cg.services.orders.validate_order_services.validate_pool_order import ValidatePoolOrderService
-
-
-class PoolOrderSubmitter(OrderSubmitter):
- """
- Class for submitting pool orders.
- This class is used to submit orders for the following workflows:
- - Fluffy
- - RML (Ready made libraries)
-
- """
-
- def __init__(
- self,
- order_validation_service: ValidatePoolOrderService,
- order_store_service: StorePoolOrderService,
- ):
- self.order_validation_service = order_validation_service
- self.order_store_service = order_store_service
-
- def submit_order(self, order_in: OrderIn) -> dict:
- self.order_validation_service.validate_order(order_in)
- return self.order_store_service.store_order(order_in)
diff --git a/cg/services/orders/validate_order_services/validate_case_order.py b/cg/services/orders/validate_order_services/validate_case_order.py
deleted file mode 100644
index 01dc68a184..0000000000
--- a/cg/services/orders/validate_order_services/validate_case_order.py
+++ /dev/null
@@ -1,102 +0,0 @@
-from cg.exc import OrderError
-from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
-from cg.models.orders.samples import Of1508Sample, OrderInSample
-from cg.services.orders.submitters.order_submitter import ValidateOrderService
-from cg.store.models import Sample, Customer
-from cg.store.store import Store
-
-
-class ValidateCaseOrderService(ValidateOrderService):
-
- def __init__(self, status_db: Store):
- self.status_db = status_db
-
- def validate_order(self, order: OrderIn) -> None:
- self._validate_subject_sex(samples=order.samples, customer_id=order.customer)
- self._validate_samples_available_to_customer(
- samples=order.samples, customer_id=order.customer
- )
- self._validate_case_names_are_unique(samples=order.samples, customer_id=order.customer)
- if order.order_type == OrderType.RNAFUSION:
- self._validate_only_one_sample_per_case(samples=order.samples)
-
- def _validate_subject_sex(self, samples: [Of1508Sample], customer_id: str):
- """Validate that sex is consistent with existing samples, skips samples of unknown sex
-
- Args:
- samples (list[dict]): Samples to validate
- customer_id (str): Customer that the samples belong to
- Returns:
- Nothing
- """
- sample: Of1508Sample
- for sample in samples:
- subject_id: str = sample.subject_id
- if not subject_id:
- continue
- new_gender: str = sample.sex
- if new_gender == "unknown":
- continue
-
- existing_samples: list[Sample] = self.status_db.get_samples_by_customer_and_subject_id(
- customer_internal_id=customer_id, subject_id=subject_id
- )
- existing_sample: Sample
- for existing_sample in existing_samples:
- previous_gender = existing_sample.sex
- if previous_gender == "unknown":
- continue
-
- if previous_gender != new_gender:
- raise OrderError(
- f"Sample gender inconsistency for subject_id: {subject_id}: previous gender {previous_gender}, new gender {new_gender}"
- )
-
- def _validate_samples_available_to_customer(
- self, samples: list[OrderInSample], customer_id: str
- ) -> None:
- """Validate that the customer have access to all samples"""
- sample: Of1508Sample
- for sample in samples:
- if not sample.internal_id:
- continue
-
- existing_sample: Sample = self.status_db.get_sample_by_internal_id(
- internal_id=sample.internal_id
- )
-
- data_customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
-
- if existing_sample.customer not in data_customer.collaborators:
- raise OrderError(f"Sample not available: {sample.name}")
-
- def _validate_case_names_are_unique(
- self, samples: list[OrderInSample], customer_id: str
- ) -> None:
- """Validate that the names of all cases are unused for all samples"""
-
- customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
-
- sample: Of1508Sample
- for sample in samples:
- if self._is_rerun_of_existing_case(sample=sample):
- continue
- if self.status_db.get_case_by_name_and_customer(
- customer=customer, case_name=sample.family_name
- ):
- raise OrderError(f"Case name {sample.family_name} already in use")
-
- @staticmethod
- def _is_rerun_of_existing_case(sample: Of1508Sample) -> bool:
- return sample.case_internal_id is not None
-
- @staticmethod
- def _validate_only_one_sample_per_case(samples: list[Of1508Sample]) -> None:
- """Validates that each case contains only one sample."""
- if len({sample.family_name for sample in samples}) != len(samples):
- raise OrderError("Each case in an RNAFUSION order must have exactly one sample.")
diff --git a/cg/services/orders/validate_order_services/validate_fastq_order.py b/cg/services/orders/validate_order_services/validate_fastq_order.py
deleted file mode 100644
index cbfe5728a7..0000000000
--- a/cg/services/orders/validate_order_services/validate_fastq_order.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from cg.models.orders.order import OrderIn
-from cg.services.orders.submitters.order_submitter import ValidateOrderService
-from cg.store.store import Store
-
-
-class ValidateFastqOrderService(ValidateOrderService):
-
- def __init__(self, status_db: Store):
- self.status_db = status_db
-
- def validate_order(self, order: OrderIn) -> None:
- pass
diff --git a/cg/services/orders/validate_order_services/validate_metagenome_order.py b/cg/services/orders/validate_order_services/validate_metagenome_order.py
deleted file mode 100644
index 330abc985e..0000000000
--- a/cg/services/orders/validate_order_services/validate_metagenome_order.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from cg.exc import OrderError
-from cg.models.orders.order import OrderIn
-from cg.models.orders.samples import MetagenomeSample
-from cg.services.orders.submitters.order_submitter import ValidateOrderService
-from cg.store.models import Customer
-from cg.store.store import Store
-
-
-class ValidateMetagenomeOrderService(ValidateOrderService):
-
- def __init__(self, status_db: Store):
- self.status_db = status_db
-
- def validate_order(self, order: OrderIn) -> None:
- self._validate_sample_names_are_unique(samples=order.samples, customer_id=order.customer)
-
- def _validate_sample_names_are_unique(
- self, samples: list[MetagenomeSample], customer_id: str
- ) -> None:
- """Validate that the names of all samples are unused."""
- customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
- for sample in samples:
- if sample.control:
- continue
- if self.status_db.get_sample_by_customer_and_name(
- customer_entry_id=[customer.id], sample_name=sample.name
- ):
- raise OrderError(f"Sample name {sample.name} already in use")
diff --git a/cg/services/orders/validate_order_services/validate_microbial_order.py b/cg/services/orders/validate_order_services/validate_microbial_order.py
deleted file mode 100644
index 82b3acff24..0000000000
--- a/cg/services/orders/validate_order_services/validate_microbial_order.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from cg.exc import OrderError
-from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
-from cg.models.orders.samples import SarsCov2Sample
-from cg.services.orders.submitters.order_submitter import ValidateOrderService
-from cg.store.models import Customer
-from cg.store.store import Store
-
-
-class ValidateMicrobialOrderService(ValidateOrderService):
-
- def __init__(self, status_db: Store):
- self.status_db = status_db
-
- def validate_order(self, order: OrderIn) -> None:
- if order.order_type == OrderType.SARS_COV_2:
- self._validate_sample_names_are_available(
- samples=order.samples, customer_id=order.customer
- )
-
- def _validate_sample_names_are_available(
- self, samples: list[SarsCov2Sample], customer_id: str
- ) -> None:
- """Validate names of all samples are not already in use."""
- customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
- for sample in samples:
- if sample.control:
- continue
- if self.status_db.get_sample_by_customer_and_name(
- customer_entry_id=[customer.id], sample_name=sample.name
- ):
- raise OrderError(
- f"Sample name {sample.name} already in use for customer {customer.name}"
- )
diff --git a/cg/services/orders/validate_order_services/validate_pacbio_order.py b/cg/services/orders/validate_order_services/validate_pacbio_order.py
deleted file mode 100644
index 30af38e669..0000000000
--- a/cg/services/orders/validate_order_services/validate_pacbio_order.py
+++ /dev/null
@@ -1,45 +0,0 @@
-from cg.exc import OrderError
-from cg.models.orders.order import OrderIn
-from cg.models.orders.samples import PacBioSample
-from cg.services.orders.submitters.order_submitter import ValidateOrderService
-from cg.store.models import ApplicationVersion, Customer
-from cg.store.store import Store
-
-
-class ValidatePacbioOrderService(ValidateOrderService):
-
- def __init__(self, status_db: Store):
- self.status_db = status_db
-
- def validate_order(self, order: OrderIn) -> None:
- self._validate_customer_exists(order.customer)
- self._validate_applications_exist(order.samples)
- self._validate_sample_names_available(samples=order.samples, customer_id=order.customer)
-
- def _validate_customer_exists(self, customer_id: str) -> None:
- customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
- if not customer:
- raise OrderError(f"Unknown customer: {customer_id}")
-
- def _validate_applications_exist(self, samples: list[PacBioSample]) -> None:
- for sample in samples:
- application_tag = sample.application
- application_version: ApplicationVersion = (
- self.status_db.get_current_application_version_by_tag(tag=application_tag)
- )
- if application_version is None:
- raise OrderError(f"Invalid application: {sample.application}")
-
- def _validate_sample_names_available(
- self, samples: list[PacBioSample], customer_id: str
- ) -> None:
- customer: Customer = self.status_db.get_customer_by_internal_id(customer_id)
- for sample in samples:
- if self.status_db.get_sample_by_customer_and_name(
- customer_entry_id=[customer.id], sample_name=sample.name
- ):
- raise OrderError(
- f"Sample name already used in a previous order by the same customer: {sample.name}"
- )
diff --git a/cg/services/orders/validate_order_services/validate_pool_order.py b/cg/services/orders/validate_order_services/validate_pool_order.py
deleted file mode 100644
index 4206a16e29..0000000000
--- a/cg/services/orders/validate_order_services/validate_pool_order.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from cg.exc import OrderError
-from cg.models.orders.order import OrderIn
-from cg.models.orders.samples import RmlSample
-from cg.services.orders.submitters.order_submitter import ValidateOrderService
-from cg.store.models import Customer
-from cg.store.store import Store
-
-
-class ValidatePoolOrderService(ValidateOrderService):
-
- def __init__(self, status_db: Store):
- self.status_db = status_db
-
- def validate_order(self, order: OrderIn) -> None:
- self._validate_case_names_are_available(
- customer_id=order.customer, samples=order.samples, ticket=order.ticket
- )
-
- def _validate_case_names_are_available(
- self, customer_id: str, samples: list[RmlSample], ticket: str
- ):
- """Validate names of all samples are not already in use."""
- customer: Customer = self.status_db.get_customer_by_internal_id(
- customer_internal_id=customer_id
- )
- for sample in samples:
- case_name: str = self.create_case_name(pool_name=sample.pool, ticket=ticket)
- if self.status_db.get_case_by_name_and_customer(customer=customer, case_name=case_name):
- raise OrderError(
- f"Case name {case_name} already in use for customer {customer.name}"
- )
-
- @staticmethod
- def create_case_name(ticket: str, pool_name: str) -> str:
- return f"{ticket}-{pool_name}"
diff --git a/cg/services/orders/validation/__init__.py b/cg/services/orders/validation/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/validation/constants.py b/cg/services/orders/validation/constants.py
new file mode 100644
index 0000000000..c621a8b404
--- /dev/null
+++ b/cg/services/orders/validation/constants.py
@@ -0,0 +1,48 @@
+from enum import StrEnum, auto
+
+
+class TissueBlockEnum(StrEnum):
+ SMALL: str = auto()
+ LARGE: str = auto()
+ BLANK: str = ""
+
+
+class ElutionBuffer(StrEnum):
+ """The choices of buffers."""
+
+ OTHER = "Other"
+ TRIS_HCL = "Tris-HCl"
+ WATER = "Nuclease-free water"
+
+
+ALLOWED_SKIP_RC_BUFFERS = [ElutionBuffer.TRIS_HCL, ElutionBuffer.WATER]
+
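+# Allowed sample volume interval in μL (used in the volume validation error messages).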
+MINIMUM_VOLUME, MAXIMUM_VOLUME = 20, 130
+
+
+class ExtractionMethod(StrEnum):
+ EZ1 = "EZ1"
+ MAELSTROM = "Maelstrom"
+ MAGNAPURE_96 = "MagNaPure 96"
+ QIAGEN_MAGATTRACT = "Qiagen MagAttract"
+ QIASYMPHONE = "QIAsymphony"
+ OTHER = 'Other (specify in "Comments")'
+
+
+class IndexEnum(StrEnum):
+ AVIDA_INDEX_PLATE = "Avida Index plate"
+ AVIDA_INDEX_STRIP = "Avida Index strip"
+ IDT_DS_B = "IDT DupSeq 10 bp Set B"
+ IDT_DS_F = "IDT DupSeq 10 bp Set F"
+ IDT_XGEN_UDI = "IDT xGen UDI Adapters"
+ KAPA_UDI_NIPT = "KAPA UDI NIPT"
+ NEXTERA_XT = "Nextera XT Dual"
+ NEXTFLEX_UDI_96 = "NEXTflex® Unique Dual Index Barcodes 1 - 96"
+ NEXTFLEX_V2_UDI_96 = "NEXTflex® v2 UDI Barcodes 1 - 96"
+ TEN_X_TN_A = "10X Genomics Dual Index kit TN Set A"
+ TEN_X_TT_A = "10X Genomics Dual Index kit TT Set A"
+ TWIST_UDI_A = "TWIST UDI Set A"
+ TWIST_UDI_B = "TWIST UDI Set B"
+ TWIST_UDI_C = "TWIST UDI Set C"
+ TRUSEQ_DNA_HT = "TruSeq DNA HT Dual-index (D7-D5)"
+ NO_INDEX = "NoIndex"
diff --git a/cg/services/orders/validation/errors/__init__.py b/cg/services/orders/validation/errors/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/validation/errors/case_errors.py b/cg/services/orders/validation/errors/case_errors.py
new file mode 100644
index 0000000000..3054f1376f
--- /dev/null
+++ b/cg/services/orders/validation/errors/case_errors.py
@@ -0,0 +1,58 @@
+from cg.services.orders.validation.errors.order_errors import OrderError
+
+
+class CaseError(OrderError):
+ case_index: int
+
+
+class RepeatedCaseNameError(CaseError):
+ field: str = "name"
+ message: str = "Case name already used"
+
+
+class InvalidGenePanelsError(CaseError):
+ def __init__(self, case_index: int, panels: list[str]):
+ message = "Invalid panels: " + ",".join(panels)
+ super(CaseError, self).__init__(field="panels", case_index=case_index, message=message)
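+        # e.g. InvalidGenePanelsError(case_index=0, panels=["unknown-panel"]) yields
+        # message "Invalid panels: unknown-panel" (illustrative values)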
+
+
+class RepeatedGenePanelsError(CaseError):
+ field: str = "panels"
+ message: str = "Gene panels must be unique"
+
+
+class CaseNameNotAvailableError(CaseError):
+ field: str = "name"
+ message: str = "Case name already used in a previous order"
+
+
+class CaseDoesNotExistError(CaseError):
+ field: str = "internal_id"
+ message: str = "The case does not exist"
+
+
+class CaseOutsideOfCollaborationError(CaseError):
+ field: str = "internal_id"
+ message: str = "Case does not belong to collaboration"
+
+
+class MultipleSamplesInCaseError(CaseError):
+ field: str = "sample_errors"
+ message: str = "Multiple samples in the same case not allowed"
+
+
+class MoreThanTwoSamplesInCaseError(CaseError):
+ field: str = "sample_errors"
+ message: str = "More than two samples in the same case not allowed"
+
+
+class NumberOfNormalSamplesError(CaseError):
+ field: str = "sample_errors"
+
+
+class DoubleNormalError(NumberOfNormalSamplesError):
+ message: str = "Only one non-tumour sample is allowed per case"
+
+
+class DoubleTumourError(NumberOfNormalSamplesError):
+ message: str = "Only one tumour sample is allowed per case"
diff --git a/cg/services/orders/validation/errors/case_sample_errors.py b/cg/services/orders/validation/errors/case_sample_errors.py
new file mode 100644
index 0000000000..f68b341361
--- /dev/null
+++ b/cg/services/orders/validation/errors/case_sample_errors.py
@@ -0,0 +1,172 @@
+from cg.services.orders.validation.constants import MAXIMUM_VOLUME, MINIMUM_VOLUME
+from cg.services.orders.validation.errors.case_errors import CaseError
+from cg.services.orders.validation.errors.sample_errors import SampleError
+
+
+class CaseSampleError(CaseError, SampleError):
+ pass
+
+
+class OccupiedWellError(CaseSampleError):
+ field: str = "well_position"
+ message: str = "Well is already occupied"
+
+
+class ApplicationArchivedError(CaseSampleError):
+ field: str = "application"
+ message: str = "Chosen application is archived"
+
+
+class ApplicationNotValidError(CaseSampleError):
+ field: str = "application"
+ message: str = "Chosen application does not exist"
+
+
+class ApplicationNotCompatibleError(CaseSampleError):
+ field: str = "application"
+ message: str = "Application is not allowed for the chosen workflow"
+
+
+class SampleNameRepeatedError(CaseSampleError):
+ field: str = "name"
+ message: str = "Sample name already used"
+
+
+class SampleNameSameAsCaseNameError(CaseSampleError):
+ field: str = "name"
+ message: str = "Sample name can not be the same as any case name in order"
+
+
+class InvalidFatherSexError(CaseSampleError):
+ field: str = "father"
+ message: str = "Father must be male"
+
+
+class FatherNotInCaseError(CaseSampleError):
+ field: str = "father"
+ message: str = "Father must be in the same case"
+
+
+class InvalidMotherSexError(CaseSampleError):
+ field: str = "mother"
+ message: str = "Mother must be female"
+
+
+class PedigreeError(CaseSampleError):
+ message: str = "Invalid pedigree relationship"
+
+
+class DescendantAsMotherError(PedigreeError):
+ field: str = "mother"
+ message: str = "Descendant sample cannot be mother"
+
+
+class DescendantAsFatherError(PedigreeError):
+ field: str = "father"
+ message: str = "Descendant sample cannot be father"
+
+
+class SampleIsOwnMotherError(PedigreeError):
+ field: str = "mother"
+ message: str = "Sample cannot be its own mother"
+
+
+class SampleIsOwnFatherError(PedigreeError):
+ field: str = "father"
+ message: str = "Sample cannot be its own father"
+
+
+class MotherNotInCaseError(CaseSampleError):
+ field: str = "mother"
+ message: str = "Mother must be in the same case"
+
+
+class SampleDoesNotExistError(CaseSampleError):
+ field: str = "internal_id"
+ message: str = "The sample does not exist"
+
+
+class SubjectIdSameAsCaseNameError(CaseSampleError):
+ field: str = "subject_id"
+ message: str = "Subject id must be different from the case name"
+
+
+class ConcentrationRequiredIfSkipRCError(CaseSampleError):
+ field: str = "concentration_ng_ul"
+ message: str = "Concentration is required when skipping reception control"
+
+
+class SubjectIdSameAsSampleNameError(CaseSampleError):
+ field: str = "subject_id"
+ message: str = "Subject id must be different from the sample name"
+
+
+class InvalidConcentrationIfSkipRCError(CaseSampleError):
+ def __init__(self, case_index: int, sample_index: int, allowed_interval: tuple[float, float]):
+ field: str = "concentration_ng_ul"
+ message: str = (
+ f"Concentration must be between {allowed_interval[0]} ng/μL and {allowed_interval[1]} ng/μL if reception control should be skipped"
+ )
+ super(CaseSampleError, self).__init__(
+ case_index=case_index, sample_index=sample_index, field=field, message=message
+ )
+
+
+class WellPositionMissingError(CaseSampleError):
+ field: str = "well_position"
+ message: str = "Well position is required for well plates"
+
+
+class ContainerNameMissingError(CaseSampleError):
+ field: str = "container_name"
+ message: str = "Container name is required for well plates"
+
+
+class InvalidVolumeError(CaseSampleError):
+ field: str = "volume"
+ message: str = f"Volume must be between {MINIMUM_VOLUME}-{MAXIMUM_VOLUME} μL"
+
+
+class VolumeRequiredError(CaseSampleError):
+ field: str = "volume"
+ message: str = "Volume is required"
+
+
+class InvalidBufferError(CaseSampleError):
+ field: str = "elution_buffer"
+ message: str = "The chosen buffer is not allowed when skipping reception control"
+
+
+class SexSubjectIdError(CaseSampleError):
+ field: str = "sex"
+ message: str = "Another sample with the same subject id has a different sex"
+
+
+class CaptureKitMissingError(CaseSampleError):
+ field: str = "capture_kit"
+ message: str = "Bait set is required for TGS analyses"
+
+
+class WellFormatError(CaseSampleError):
+ field: str = "well_position"
+ message: str = "Well position must follow the format A-H:1-12"
+
+
+class ContainerNameRepeatedError(CaseSampleError):
+ field: str = "container_name"
+ message: str = "Tube names must be unique among samples"
+
+
+class StatusUnknownError(CaseSampleError):
+ field: str = "status"
+ message: str = "Samples in case cannot all have status unknown"
+
+
+class BufferMissingError(CaseSampleError):
+ field: str = "elution_buffer"
+ message: str = "Buffer must be specified with this application"
+
+
+class SampleOutsideOfCollaborationError(CaseSampleError):
+ field: str = "internal_id"
+ message: str = "Sample cannot be outside of collaboration"
diff --git a/cg/services/orders/validation/errors/order_errors.py b/cg/services/orders/validation/errors/order_errors.py
new file mode 100644
index 0000000000..64f68e8609
--- /dev/null
+++ b/cg/services/orders/validation/errors/order_errors.py
@@ -0,0 +1,26 @@
+from pydantic import BaseModel
+
+
+class OrderError(BaseModel):
+ field: str
+ message: str
+
+
+class UserNotAssociatedWithCustomerError(OrderError):
+ field: str = "customer"
+ message: str = "User does not belong to customer"
+
+
+class CustomerCannotSkipReceptionControlError(OrderError):
+ field: str = "skip_reception_control"
+ message: str = "Customer cannot skip reception control"
+
+
+class CustomerDoesNotExistError(OrderError):
+ field: str = "customer"
+ message: str = "Customer does not exist"
+
+
+class OrderNameRequiredError(OrderError):
+ field: str = "name"
+ message: str = "Order name is required"
diff --git a/cg/services/orders/validation/errors/sample_errors.py b/cg/services/orders/validation/errors/sample_errors.py
new file mode 100644
index 0000000000..c0b48d4a12
--- /dev/null
+++ b/cg/services/orders/validation/errors/sample_errors.py
@@ -0,0 +1,142 @@
+from cg.services.orders.validation.constants import MAXIMUM_VOLUME, MINIMUM_VOLUME, IndexEnum
+from cg.services.orders.validation.errors.order_errors import OrderError
+from cg.services.orders.validation.index_sequences import INDEX_SEQUENCES
+
+
+class SampleError(OrderError):
+ sample_index: int
+
+
+class ApplicationNotValidError(SampleError):
+ field: str = "application"
+ message: str = "Chosen application does not exist"
+
+
+class ApplicationArchivedError(SampleError):
+ field: str = "application"
+ message: str = "Chosen application is archived"
+
+
+class ApplicationNotCompatibleError(SampleError):
+ field: str = "application"
+ message: str = "Chosen application is not compatible with workflow"
+
+
+class OccupiedWellError(SampleError):
+ field: str = "well_position"
+ message: str = "Well is already occupied"
+
+
+class WellPositionMissingError(SampleError):
+ field: str = "well_position"
+ message: str = "Well position is required for well plates"
+
+
+class WellPositionRmlMissingError(SampleError):
+ field: str = "well_position_rml"
+ message: str = "Well position is required for RML plates"
+
+
+class SampleNameRepeatedError(SampleError):
+ field: str = "name"
+ message: str = "Sample name repeated"
+
+
+class InvalidVolumeError(SampleError):
+ field: str = "volume"
+    message: str = f"Volume must be between {MINIMUM_VOLUME} and {MAXIMUM_VOLUME} μL"
+
+
+class VolumeRequiredError(SampleError):
+ field: str = "volume"
+ message: str = "Volume is required"
+
+
+class SampleNameNotAvailableError(SampleError):
+ field: str = "name"
+ message: str = "Sample name already used in previous order"
+
+
+class SampleNameNotAvailableControlError(SampleError):
+ field: str = "name"
+    message: str = "Sample name already in use. Only control samples may have repeated names"
+
+
+class ContainerNameRepeatedError(SampleError):
+ field: str = "container_name"
+ message: str = "Tube names must be unique among samples"
+
+
+class WellFormatError(SampleError):
+ field: str = "well_position"
+ message: str = "Well position must follow the format A-H:1-12"
+
+
+class WellFormatRmlError(SampleError):
+ field: str = "well_position_rml"
+ message: str = "Well position must follow the format A-H:1-12"
+
+
+class ContainerNameMissingError(SampleError):
+ field: str = "container_name"
+ message: str = "Container must have a name"
+
+
+class BufferInvalidError(SampleError):
+    field: str = "elution_buffer"
+    message: str = "Buffer must be Tris-HCl or Nuclease-free water when skipping reception control"
+
+
+class ConcentrationRequiredError(SampleError):
+    field: str = "concentration_ng_ul"
+    message: str = "Concentration is required when skipping reception control"
+
+
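+# The remaining errors interpolate runtime context (allowed intervals, pool
+# names, index counts) into `message`, so they override __init__ instead of
+# relying on class-level defaults.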
+class ConcentrationInvalidIfSkipRCError(SampleError):
+    def __init__(self, sample_index: int, allowed_interval: tuple[float, float]):
+        field: str = "concentration_ng_ul"
+        message: str = (
+            f"Concentration must be between {allowed_interval[0]} ng/μL and "
+            f"{allowed_interval[1]} ng/μL when skipping reception control"
+        )
+        super().__init__(sample_index=sample_index, field=field, message=message)
+
+
+class PoolApplicationError(SampleError):
+ def __init__(self, sample_index: int, pool_name: str):
+ field: str = "application"
+ message: str = f"Multiple applications detected in pool {pool_name}"
+        super().__init__(sample_index=sample_index, field=field, message=message)
+
+
+class PoolPriorityError(SampleError):
+ def __init__(self, sample_index: int, pool_name: str):
+ field: str = "priority"
+ message: str = f"Multiple priorities detected in pool {pool_name}"
+        super().__init__(sample_index=sample_index, field=field, message=message)
+
+
+class IndexNumberMissingError(SampleError):
+ field: str = "index_number"
+ message: str = "Index number is required"
+
+
+class IndexNumberOutOfRangeError(SampleError):
+ def __init__(self, sample_index: int, index: IndexEnum):
+ field: str = "index_number"
+ maximum: int = len(INDEX_SEQUENCES[index])
+        message: str = f"Index number must be between 1 and {maximum}"
+        super().__init__(sample_index=sample_index, field=field, message=message)
+
+
+class IndexSequenceMissingError(SampleError):
+ field: str = "index_sequence"
+ message: str = "Index sequence is required"
+
+
+class IndexSequenceMismatchError(SampleError):
+    def __init__(self, sample_index: int, index: IndexEnum, index_number: int):
+        field: str = "index_number"
+        allowed_sequence: str = INDEX_SEQUENCES[index][index_number - 1]
+        message: str = f"Index and index number indicate sequence {allowed_sequence}"
+        super().__init__(sample_index=sample_index, field=field, message=message)
diff --git a/cg/services/orders/validation/errors/validation_errors.py b/cg/services/orders/validation/errors/validation_errors.py
new file mode 100644
index 0000000000..a0e2be4f4a
--- /dev/null
+++ b/cg/services/orders/validation/errors/validation_errors.py
@@ -0,0 +1,35 @@
+from pydantic import BaseModel
+
+from cg.services.orders.validation.errors.case_errors import CaseError
+from cg.services.orders.validation.errors.case_sample_errors import CaseSampleError
+from cg.services.orders.validation.errors.order_errors import OrderError
+from cg.services.orders.validation.errors.sample_errors import SampleError
+
+
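+# Aggregates the errors produced by the order-, case-, sample- and
+# case-sample-level validators into one payload.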
+class ValidationErrors(BaseModel):
+ order_errors: list[OrderError] = []
+ case_errors: list[CaseError] = []
+ sample_errors: list[SampleError] = []
+ case_sample_errors: list[CaseSampleError] = []
+
+ @property
+ def is_empty(self) -> bool:
+ """Return True if there are no errors in any of the attributes."""
+ return all(not getattr(self, field) for field in self.model_fields)
+
+    def get_error_message(self) -> str:
+        """Return a string documenting all errors."""
+        error_string = ""
+        for error in self.order_errors:
+            error_string += f"Problem with {error.field}: {error.message}\n"
+        for error in self.case_errors:
+            error_string += (
+                f"Problem with {error.field} in case {error.case_index}: {error.message}\n"
+            )
+        for error in self.case_sample_errors:
+            error_string += (
+                f"Problem with {error.field} in case {error.case_index} "
+                f"sample {error.sample_index}: {error.message}\n"
+            )
+        for error in self.sample_errors:
+            error_string += (
+                f"Problem with {error.field} in sample {error.sample_index}: {error.message}\n"
+            )
+        return error_string
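+
+
+# A minimal usage sketch (hypothetical caller, not part of this module's
+# API): collect errors from the validators, then fail with one message.
+#
+#     errors = ValidationErrors(order_errors=[OrderNameRequiredError()])
+#     if not errors.is_empty:
+#         raise ValueError(errors.get_error_message())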
diff --git a/cg/services/orders/validation/index_sequences.py b/cg/services/orders/validation/index_sequences.py
new file mode 100644
index 0000000000..a602c96654
--- /dev/null
+++ b/cg/services/orders/validation/index_sequences.py
@@ -0,0 +1,1731 @@
+from cg.services.orders.validation.constants import IndexEnum
+
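+# Maps each IndexEnum member to its ordered list of index names (paired
+# sequences in parentheses). List order matters: the sample validators treat
+# `index_number` as a 1-based position into these lists.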
+INDEX_SEQUENCES = {
+ IndexEnum.AVIDA_INDEX_PLATE: [
+ "A01-clear (TACGATAC-CACGTGAG)",
+ "B01-clear (CGTTCGTC-GCAGTTAG)",
+ "C01-clear (GCGAATTA-CTTGATCA)",
+ "D01-clear (GCATGCCT-CGTAACTG)",
+ "E01-clear (TCGAGCAT-CGTTAACG)",
+ "F01-clear (AAGGTCGA-GCCAGTAG)",
+ "G01-clear (CGTAACTG-TGTCGTCG)",
+ "H01-clear (TCGATACA-TTAGTGCG)",
+ "A02-clear (CGTAGTTA-GTAGACTG)",
+ "B02-clear (GAGTGCGT-TCGCGTCA)",
+ "C02-clear (CGAATTCA-TTGGCATG)",
+ "D02-clear (TCTGAGTC-ATCATGCG)",
+ "E02-clear (AGCTACAT-GATCCATG)",
+ "F02-clear (CACACATA-CTCGTACG)",
+ "G02-clear (AGAACCGT-TACGGTTG)",
+ "H02-clear (ATACCTAC-TCCTTGAG)",
+ "A03-clear (AAGAGACA-TGCTTACG)",
+ "B03-clear (AGTCTTCA-CAGTATCA)",
+ "C03-clear (GATCGCCT-TGTATCAC)",
+ "D03-clear (GTAGTCAT-AACTCTTG)",
+ "E03-clear (TCTATGCG-GCGGTATG)",
+ "F03-clear (GAGGCTCT-CTAGTGTA)",
+ "G03-clear (GCCTTCAT-AACGGTCA)",
+ "H03-clear (CTGAGCTA-GCACAGTA)",
+ "A04-clear (TCAACTGC-GCTACGCA)",
+ "B04-clear (GTGCGCTT-CATCGACG)",
+ "C04-clear (TGCGCTCT-GTCGCCTA)",
+ "D04-clear (AATACGCG-CAGGTCTG)",
+ "E04-clear (ATGGTAGC-GTCGATCG)",
+ "F04-clear (GAGCACTG-GAATTGTG)",
+ "G04-clear (CATTGCAT-CGACGGTA)",
+ "H04-clear (AACACCGA-CGATGTCG)",
+ "A05-clear (CATGAATG-TCTCCTTG)",
+ "B05-clear (CTGCCTTA-GCATGGAG)",
+ "C05-clear (GCCAGTAG-GTTCGAGC)",
+ "D05-clear (CGGTCTCT-GAGATCGC)",
+ "E05-clear (CAACCGTG-AAGTGGAC)",
+ "F05-clear (CATCAGTC-TTATCTCG)",
+ "G05-clear (GATGCCAA-TTAGTTGC)",
+ "H05-clear (TGTCGTCG-GTTATCGA)",
+ "A06-clear (GTCGAAGT-CAGTGAGC)",
+ "B06-clear (TCCGAACT-TCATACCG)",
+ "C06-clear (CGCCAATT-ATGGTAGC)",
+ "D06-clear (CAAGTTAC-TATTCCTG)",
+ "E06-clear (TGCCTCGA-CAGTTGCG)",
+ "F06-clear (AACCTAAC-CGTCTGTG)",
+ "G06-clear (GCCAACAA-GTAATACG)",
+ "H06-clear (CTTGATCA-CAATGCTG)",
+ "A07-clear (GTACGGAC-GCACGATG)",
+ "B07-clear (GAGCATAC-GTACCTTG)",
+ "C07-clear (TGGATTGA-TAAGAGTG)",
+ "D07-clear (AGCAGTGA-CACAGGTG)",
+ "E07-clear (TCGACAAC-CTCTCGTG)",
+ "F07-clear (AAGTGGAC-TACGTAGC)",
+ "G07-clear (GTCCACCT-AGTTGCCG)",
+ "H07-clear (GACTTGAC-GCATCATA)",
+ "A08-clear (TGCACATC-GAGTGTAG)",
+ "B08-clear (AAGTACTC-GATAGGAC)",
+ "C08-clear (CACAGACT-TCTGTCAG)",
+ "D08-clear (GAAGGTAT-CGACTACG)",
+ "E08-clear (CGCGCAAT-CAGAGCAG)",
+ "F08-clear (CAATAGAC-ATGACTCG)",
+ "G08-clear (AGATACGA-CATACTTG)",
+ "H08-clear (TCACAAGA-TTGTGTAC)",
+ "A09-clear (TCGCCTGT-TCTGAGTC)",
+ "B09-clear (AGAGTAAG-GAGACACG)",
+ "C09-clear (ATATTCCG-GACAATTG)",
+ "D09-clear (CACAACTT-AGGTGAGA)",
+ "E09-clear (CGCTTCCA-TCGAACTG)",
+ "F09-clear (TAAGAGTG-AGAAGACG)",
+ "G09-clear (AATGGAAC-GTTGCGGA)",
+ "H09-clear (TCAGCATC-AAGGAGCG)",
+ "A10-clear (AGTACGTT-AGAGTAAG)",
+ "B10-clear (AATGCTAG-ATACTCGC)",
+ "C10-clear (TCTAGAGC-GATAGAGA)",
+ "D10-clear (GCCGATAT-AGACCTGA)",
+ "E10-clear (TCAGCTCA-TATGCCGC)",
+ "F10-clear (CGTCTGTG-TTAACGTG)",
+ "G10-clear (CAGTCATT-TCGAAGGA)",
+ "H10-clear (GCACACAT-AATACGCG)",
+ "A11-clear (ATTGAGCT-TCACTGTG)",
+ "B11-clear (TTGGATCT-TGCCGTTA)",
+ "C11-clear (CAGCAATA-ATTAGGAG)",
+ "D11-clear (TCTACCTC-ATATTCCG)",
+ "E11-clear (CTTAAGAC-GACTGACG)",
+ "F11-clear (TCCTGAGA-GTAGAGCA)",
+ "G11-clear (CTAGTGTA-AACAGCCG)",
+ "H11-clear (TTCCAACA-GCTTAGCG)",
+ "A12-clear (CTTGCGAT-CTGAGCTA)",
+ "B12-clear (GTTCATTC-GTCGGTAA)",
+ "C12-clear (CTTCACGT-GAATCGCA)",
+ "D12-clear (AACGCATT-ATAGTGAC)",
+ "E12-clear (CGTCAGAT-CGTAGTTA)",
+ "F12-clear (TCTGTCAG-CGTAAGCA)",
+ "G12-clear (TAGGACAT-CATCGGAA)",
+ "H12-clear (GCTTAGCG-ATAAGCTG)",
+ "A01-blue (CTGAATAG-TCCTGAGA)",
+ "B01-blue (AAGTGTCT-GCAGGTTC)",
+ "C01-blue (GAATCGCA-TCTAGAGC)",
+ "D01-blue (CAATCCGA-CACATTCG)",
+ "E01-blue (GCCTCGTT-AGGCAATG)",
+ "F01-blue (CATCCTGT-GCTTCCAG)",
+ "G01-blue (CTGTTGTT-CAACCGTG)",
+ "H01-blue (CTGACCGT-AGAGGTTA)",
+ "A02-blue (CAATGCTG-CATGAATG)",
+ "B02-blue (GTTGGTGT-AGTTCATG)",
+ "C02-blue (TAGACCAA-CAAGGTGA)",
+ "D02-blue (TCAAGCTA-CTTCGTAC)",
+ "E02-blue (TATGCCGC-ATTCCGTG)",
+ "F02-blue (TCGCAGAT-TTGATGGC)",
+ "G02-blue (GAACATGT-GACGTGCA)",
+ "H02-blue (GAGTGTAG-CTTAAGAC)",
+ "A03-blue (AAGGCTTA-TGCCTCGA)",
+ "B03-blue (TTAACGTG-TTGGAGAA)",
+ "C03-blue (CGATACCT-CTTATTGC)",
+ "D03-blue (GTCAGAAT-CGATATGC)",
+ "E03-blue (CGAATCAT-TGAAGTAC)",
+ "F03-blue (GAACGCAA-GCTTGCTA)",
+ "G03-blue (TGTTGGTT-GATTCGGC)",
+ "H03-blue (TCCAGACG-TGGCTTAG)",
+ "A04-blue (CATAGCCA-AGTAACGC)",
+ "B04-blue (CAAGGTGA-ATGGTTAG)",
+ "C04-blue (TCACCGAC-GTGTTGAG)",
+ "D04-blue (GAGCTGTA-GCCGTAAC)",
+ "E04-blue (GTCTCTTC-GAAGCCTC)",
+ "F04-blue (GAATCCGA-TTACACGC)",
+ "G04-blue (CGTCTGCA-GCCGATAT)",
+ "H04-blue (CGAACTGA-TCGCCTGT)",
+ ],
+ IndexEnum.AVIDA_INDEX_STRIP: [
+ "1-black (TACGATAC-CACGTGAG)",
+ "2-black (CGTTCGTC-GCAGTTAG)",
+ "3-black (GCGAATTA-CTTGATCA)",
+ "4-black (GCATGCCT-CGTAACTG)",
+ "5-black (TCGAGCAT-CGTTAACG)",
+ "6-black (AAGGTCGA-GCCAGTAG)",
+ "7-black (CGTAACTG-TGTCGTCG)",
+ "8-black (TCGATACA-TTAGTGCG)",
+ "9-blue (CGTAGTTA-GTAGACTG)",
+ "10-blue (GAGTGCGT-TCGCGTCA)",
+ "11-blue (CGAATTCA-TTGGCATG)",
+ "12-blue (TCTGAGTC-ATCATGCG)",
+ "13-blue (AGCTACAT-GATCCATG)",
+ "14-blue (CACACATA-CTCGTACG)",
+ "15-blue (AGAACCGT-TACGGTTG)",
+ "16-blue (ATACCTAC-TCCTTGAG)",
+ "17-red (AAGAGACA-TGCTTACG)",
+ "18-red (AGTCTTCA-CAGTATCA)",
+ "19-red (GATCGCCT-TGTATCAC)",
+ "20-red (GTAGTCAT-AACTCTTG)",
+ "21-red (TCTATGCG-GCGGTATG)",
+ "22-red (GAGGCTCT-CTAGTGTA)",
+ "23-red (GCCTTCAT-AACGGTCA)",
+ "24-red (CTGAGCTA-GCACAGTA)",
+ "25-white (TCAACTGC-GCTACGCA)",
+ "26-white (GTGCGCTT-CATCGACG)",
+ "27-white (TGCGCTCT-GTCGCCTA)",
+ "28-white (AATACGCG-CAGGTCTG)",
+ "29-white (ATGGTAGC-GTCGATCG)",
+ "30-white (GAGCACTG-GAATTGTG)",
+ "31-white (CATTGCAT-CGACGGTA)",
+ "32-white (AACACCGA-CGATGTCG)",
+ ],
+ IndexEnum.IDT_DS_B: [
+ "A01 IDT_10nt_541 (ATTCCACACT-AACAAGACCA)",
+ "B01 IDT_10nt_553 (TACTAGAGGA-AGACCGGTTC)",
+ "C01 IDT_10nt_568 (TGTGAGCGAA-AACTCCGATC)",
+ "D01 IDT_10nt_581 (GATGAAGGAT-AGCCTTATTC)",
+ "E01 IDT_10nt_595 (AACAACCTCA-CTTAGTCCAA)",
+ "F01 IDT_10nt_607 (AGATCTTCCA-AGATCAACAC)",
+ "G01 IDT_10nt_620 (ACCGACGTGA-CATGACCGTT)",
+ "H01 IDT_10nt_635 (AATCTCAGGC-GAGTTAGAGA)",
+ "A02 IDT_10nt_542 (AGTTAGCTGG-CATTCTCATC)",
+ "B02 IDT_10nt_554 (CGATGAGACT-GCACGACTAA)",
+ "C02 IDT_10nt_569 (CTAGTCAGAA-CAGCATTCAA)",
+ "D02 IDT_10nt_582 (CGATAGCAGG-AATGCTACGA)",
+ "E02 IDT_10nt_596 (TATGGTGATG-GCTACTTGAA)",
+ "F02 IDT_10nt_608 (ACCAGACGGT-GATGAAGACG)",
+ "G02 IDT_10nt_621 (GTTGGACGGT-ACAGTGTCCT)",
+ "H02 IDT_10nt_637 (CCGGTTCATG-CAAGACGTCT)",
+ "A03 IDT_10nt_543 (TTGGCCATAC-TGCACTAGGC)",
+ "B03 IDT_10nt_555 (TCGCTTAAGC-ACACGCCTAG)",
+ "C03 IDT_10nt_570 (AAGACAGCTT-AGGTCAGCGA)",
+ "D03 IDT_10nt_583 (TATCACTCTC-AGAACAAGGT)",
+ "E03 IDT_10nt_597 (GCTGCTAACA-CACAAGAGCT)",
+ "F03 IDT_10nt_609 (AGCTATCTTC-CTAGCGACAC)",
+ "G03 IDT_10nt_622 (AGGTGGCTAC-CGTTACGTGG)",
+ "H03 IDT_10nt_639 (AGAGGAATCG-GATAGTCTGG)",
+ "A04 IDT_10nt_544 (TGCCACTGTA-TGACCTGCTG)",
+ "B04 IDT_10nt_556 (GACAATAGAG-GTGTACACTA)",
+ "C04 IDT_10nt_571 (AGACGGCATC-AGGTTACCTT)",
+ "D04 IDT_10nt_584 (ACTGGTGTCG-ACAGGACTTG)",
+ "E04 IDT_10nt_598 (TTAGGAGGAA-ACGCAAGTTC)",
+ "F04 IDT_10nt_610 (ACAGAAGGTT-GATATCTCCA)",
+ "G04 IDT_10nt_623 (GAGAGCTAAC-GCAAGTATTG)",
+ "H04 IDT_10nt_641 (TTCAGGTCGT-AACCAACCAA)",
+ "A05 IDT_10nt_545 (CTTGTCTTGC-CACATTGGTT)",
+ "B05 IDT_10nt_557 (AACGACGCAT-CGTTGTAGTT)",
+ "C05 IDT_10nt_573 (TTGGCCAGTT-CAGGTAGCAT)",
+ "D05 IDT_10nt_586 (ATTGCGGCTG-TTGTAGGACA)",
+ "E05 IDT_10nt_599 (AGAGACTTAC-CCAACTCTTA)",
+ "F05 IDT_10nt_611 (GCTGCCGATA-ACGTAGGATA)",
+ "G05 IDT_10nt_624 (GAATGTCCAA-TACGGTATTC)",
+ "H05 IDT_10nt_642 (CTATACACCA-CCGTAGTTGG)",
+ "A06 IDT_10nt_546 (ATACCTGTGA-AGCACAATGC)",
+ "B06 IDT_10nt_558 (TCAGATTCAC-TCGCACTCCA)",
+ "C06 IDT_10nt_574 (AGAGATGGTA-TTCCGTAAGC)",
+ "D06 IDT_10nt_587 (ATGGAGACGG-GAACTCTCAG)",
+ "E06 IDT_10nt_600 (GTGTACGTCG-CCTGCTAGGA)",
+ "F06 IDT_10nt_612 (TCACCGTCAC-TAAGGAGTTG)",
+ "G06 IDT_10nt_625 (TGTGAAGCTA-TGAATCGCGT)",
+ "H06 IDT_10nt_643 (GTGTTATCTC-AGTCTGGTGT)",
+ "A07 IDT_10nt_547 (AACACAGCCG-ACCAAGGTCT)",
+ "B07 IDT_10nt_560 (CCGACTCCTT-CAAGGACAGT)",
+ "C07 IDT_10nt_575 (GACGAACGTA-TGAAGGACGA)",
+ "D07 IDT_10nt_588 (CCACTGTGAC-CATCTGTCCA)",
+ "E07 IDT_10nt_601 (GAGCATCCAT-GTGGACGTGA)",
+ "F07 IDT_10nt_613 (GTTATTAGGC-GTTCATCGAG)",
+ "G07 IDT_10nt_627 (TTATCGCTGA-CCAATCCTAA)",
+ "H07 IDT_10nt_644 (CGCTTAAGTA-GTGCAGTAAT)",
+ "A08 IDT_10nt_548 (TTACGGAACA-GACAGAGTCA)",
+ "B08 IDT_10nt_561 (CTATAGCGAG-CTAGTCGACT)",
+ "C08 IDT_10nt_576 (ACGCGGACTT-CGAGGCAATA)",
+ "D08 IDT_10nt_589 (AGAATCCTTC-ACACCTGGCA)",
+ "E08 IDT_10nt_602 (CGGAACGAAG-AGGCGATGGT)",
+ "F08 IDT_10nt_614 (TATCGTTGTG-TGCGGACACA)",
+ "G08 IDT_10nt_628 (AACACAACGA-TTGGCTGCGT)",
+ "H08 IDT_10nt_645 (CACGGTTGGT-ATTCAGCTAG)",
+ "A09 IDT_10nt_549 (TCAGCACCGT-CCTATTCAGT)",
+ "B09 IDT_10nt_563 (TCAAGTTCCT-CTAGAACTTG)",
+ "C09 IDT_10nt_577 (AGACCTAGCT-GATACTTGTG)",
+ "D09 IDT_10nt_590 (GCCTTACTTA-GTTAAGGTGC)",
+ "E09 IDT_10nt_603 (GCCTAGGACT-AGCCGTTCTC)",
+ "F09 IDT_10nt_615 (ATCTTCCTAG-TACACGCGCA)",
+ "G09 IDT_10nt_630 (TTCCACCTGG-AGAAGCTCAT)",
+ "H09 IDT_10nt_646 (TTATCTGTGC-ATACTGAGTG)",
+ "A10 IDT_10nt_550 (ACTGTCAATC-GTTAAGGACG)",
+ "B10 IDT_10nt_564 (TTCACACAGT-AACACTGAAG)",
+ "C10 IDT_10nt_578 (CTGCGTACTC-TTGCCATCAG)",
+ "D10 IDT_10nt_591 (CAGAACGTGG-CGGTGAAGTC)",
+ "E10 IDT_10nt_604 (TGGATATGGC-ATACGCGCCA)",
+ "F10 IDT_10nt_616 (TATAACTCCG-AGGACACATA)",
+ "G10 IDT_10nt_631 (CTCTCATGCG-CAACCGACGT)",
+ "H10 IDT_10nt_647 (CAACCGATTA-TAACTCAGCA)",
+ "A11 IDT_10nt_551 (AATGTGGCGT-CTACCTGACA)",
+ "B11 IDT_10nt_566 (GCTACTGTCG-ATGGAGCAAG)",
+ "C11 IDT_10nt_579 (CGCAGAACTT-GACGTCGATA)",
+ "D11 IDT_10nt_592 (CCAAGACACT-CCATGTCGGT)",
+ "E11 IDT_10nt_605 (GTTCCACCGA-TAACAGCAGG)",
+ "F11 IDT_10nt_617 (GACGTTCTCT-TAGCTCTACT)",
+ "G11 IDT_10nt_632 (GCATACTCTA-TATCTCGCTA)",
+ "H11 IDT_10nt_648 (GAGCTCCACA-TTCTGTTCTG)",
+ "A12 IDT_10nt_552 (TCGTGGCTAG-TACTGCTGGC)",
+ "B12 IDT_10nt_567 (AGGACTGAAC-CGTCGCTTCA)",
+ "C12 IDT_10nt_580 (GCTGAGAGCT-CACCTATCAC)",
+ "D12 IDT_10nt_593 (GAGCTTATGA-AATGACTGGT)",
+ "E12 IDT_10nt_606 (AATGGTACTG-TAACTGCTGT)",
+ "F12 IDT_10nt_618 (CAGCCTTGAG-ATGCTACTCA)",
+ "G12 IDT_10nt_634 (TCACCAGGAC-GAGACAAGGA)",
+ "H12 IDT_10nt_649 (TAATCTACGG-TAAGTGCCAG)",
+ ],
+ IndexEnum.IDT_DS_F: [
+ "A01 IDT_10nt_1402 (ACCTATACCT-AATGACCTGA)",
+ "B01 IDT_10nt_1418 (TCAATGGCGG-TCACCGTATA)",
+ "C01 IDT_10nt_1432 (ACCGCAATTA-TTGGTCAATC)",
+ "D01 IDT_10nt_1447 (CATTGAAGGA-ACGAACCATT)",
+ "E01 IDT_10nt_1461 (GCTCACGTTC-CTGAAGCTTA)",
+ "F01 IDT_10nt_1474 (CTCAGGTGTC-CAGTCTTCGG)",
+ "G01 IDT_10nt_1487 (TGCGTAAGGT-CTACCACGAA)",
+ "H01 IDT_10nt_1501 (TCTCGTATGC-AGTACGTGAA)",
+ "A02 IDT_10nt_1403 (CAAGGCATGC-TTGTACGGCT)",
+ "B02 IDT_10nt_1419 (CAGATTAGTC-ACATCCTCAC)",
+ "C02 IDT_10nt_1433 (CACCAAGAAC-AGTAACCACT)",
+ "D02 IDT_10nt_1448 (AGGATCAAGA-TCGGCTATGA)",
+ "E02 IDT_10nt_1462 (TGCCAACCAC-TATCAGTGCA)",
+ "F02 IDT_10nt_1475 (AGGCGAATTC-TAGTGAATCG)",
+ "G02 IDT_10nt_1488 (ACCGGAGATG-GCAGAGCACA)",
+ "H02 IDT_10nt_1503 (ACCTCACATA-TACAGCCTTG)",
+ "A03 IDT_10nt_1404 (CGGTAATGAA-GATCGCCTCA)",
+ "B03 IDT_10nt_1421 (ACCTCTGACA-TGTTACCTCA)",
+ "C03 IDT_10nt_1434 (TCGAGCATAG-CAGATCTAGG)",
+ "D03 IDT_10nt_1449 (TCTCAGCGGT-GCGAGCCATA)",
+ "E03 IDT_10nt_1463 (TTAGTGGTCA-TGGATGGATA)",
+ "F03 IDT_10nt_1477 (AGATTGTGCG-TTCCAGAGAA)",
+ "G03 IDT_10nt_1491 (GAGGACGATC-AGCTACCAAT)",
+ "H03 IDT_10nt_1504 (AGCAGCACAA-CCTACTTCTA)",
+ "A04 IDT_10nt_1406 (GTTACTGGAC-GCCGAATATC)",
+ "B04 IDT_10nt_1422 (AACTCATGTC-GAAGGCCTAA)",
+ "C04 IDT_10nt_1435 (ATGCTCTCTT-ATCCAGGTTA)",
+ "D04 IDT_10nt_1450 (CTTAACAAGG-CATGTTCTGT)",
+ "E04 IDT_10nt_1464 (ACTGTATCAC-AGAGTAGTCA)",
+ "F04 IDT_10nt_1478 (CTCTCCAACG-TATGGAAGTG)",
+ "G04 IDT_10nt_1492 (TAGGTGATCT-ATAGAACACG)",
+ "H04 IDT_10nt_1506 (TAAGTCTGGC-CTCCAGCTTC)",
+ "A05 IDT_10nt_1408 (GCAGGTAATA-GCTCTGAAGG)",
+ "B05 IDT_10nt_1423 (TGGAATACCA-TACGAATCGA)",
+ "C05 IDT_10nt_1437 (CGACTTATGC-AACCTCAGCG)",
+ "D05 IDT_10nt_1452 (TTACGTGCCG-CAACTCGTAA)",
+ "E05 IDT_10nt_1465 (ACGGAGTTGA-GTTCCGACGT)",
+ "F05 IDT_10nt_1479 (ACTTAGCCAT-GATCGTGCTC)",
+ "G05 IDT_10nt_1493 (CACCGGTGAT-GCCAATACAA)",
+ "H05 IDT_10nt_1507 (AGGAATTGTC-GTCAACTCGG)",
+ "A06 IDT_10nt_1410 (GTGTAGATGT-GTTCATGACT)",
+ "B06 IDT_10nt_1424 (TGTTCGCGAC-AGCTTAGCTC)",
+ "C06 IDT_10nt_1438 (CTCGTTACAA-AATGCACCAA)",
+ "D06 IDT_10nt_1453 (TGATGGAATC-CCTTCATAGG)",
+ "E06 IDT_10nt_1466 (GTAACTTAGC-ACGCGAGTCA)",
+ "F06 IDT_10nt_1480 (AACACCGCAA-CCACATGACG)",
+ "G06 IDT_10nt_1494 (GTAACCATCA-CCATAGTCGC)",
+ "H06 IDT_10nt_1508 (AGGCAACGGA-ATCCAAGTAG)",
+ "A07 IDT_10nt_1411 (TGATTGGAGA-CTTGGCCTCT)",
+ "B07 IDT_10nt_1425 (GATGAGCGGT-TGTTAGAGGT)",
+ "C07 IDT_10nt_1439 (AATCATGGAG-GCCACTTCCT)",
+ "D07 IDT_10nt_1454 (ACGGAAGCAA-CGGTTGCAGT)",
+ "E07 IDT_10nt_1467 (ACTTGCCTGT-CCAAGAAGAT)",
+ "F07 IDT_10nt_1481 (CCAGTCACAC-CACGACTAGC)",
+ "G07 IDT_10nt_1495 (CTCTGAGGTA-CCATATACGA)",
+ "H07 IDT_10nt_1509 (CTAACGAGGT-CGAAGCCAAG)",
+ "A08 IDT_10nt_1412 (CAAGTGCGTC-TCATCGGAAT)",
+ "B08 IDT_10nt_1426 (AATGTCGGTT-GTTCCTCATT)",
+ "C08 IDT_10nt_1440 (AATGGCTCGG-CTGGTATTAG)",
+ "D08 IDT_10nt_1456 (GTCTGATCGT-GCTTGAGACG)",
+ "E08 IDT_10nt_1468 (TCGTCCTGAC-CTGTGAAGAA)",
+ "F08 IDT_10nt_1482 (CCTCCAACCA-GCCTGTACTG)",
+ "G08 IDT_10nt_1496 (TCGGTACCTC-GTGGTGACTG)",
+ "H08 IDT_10nt_1510 (GATTGAGTGT-ACACAACATG)",
+ "A09 IDT_10nt_1413 (AGAAGATGCA-ACCTTAGCCG)",
+ "B09 IDT_10nt_1427 (TTCTCAAGTG-TGGCTTATCA)",
+ "C09 IDT_10nt_1442 (GCATATCACT-ATCTCCAGAT)",
+ "D09 IDT_10nt_1457 (CTACGTCTAG-GTGGACCTTG)",
+ "E09 IDT_10nt_1469 (AACAGTAGCA-TACGCAACGG)",
+ "F09 IDT_10nt_1483 (CGTGTTAGAG-TCTTACCGAA)",
+ "G09 IDT_10nt_1497 (CCTTGGTGCA-ACGTGTAGTA)",
+ "H09 IDT_10nt_1511 (GACAGCAGCT-ATGACTCCTC)",
+ "A10 IDT_10nt_1415 (ACTCACCGAG-CTCCTGCGTA)",
+ "B10 IDT_10nt_1428 (CCATTGTCAA-ACTTAAGGAC)",
+ "C10 IDT_10nt_1443 (CACGTTCGTG-AGAAGTAGGA)",
+ "D10 IDT_10nt_1458 (GTTGTCGTAT-GAGGAGTATG)",
+ "E10 IDT_10nt_1470 (TAGTGATAGC-GCAGCATAAT)",
+ "F10 IDT_10nt_1484 (CACTCTGAAC-GTGATAAGCT)",
+ "G10 IDT_10nt_1498 (GCGCAATTGT-GCATCGGTTC)",
+ "H10 IDT_10nt_1512 (GTAGGAACCG-ATGGTCAGTA)",
+ "A11 IDT_10nt_1416 (ATTAGGCACG-ACAAGCAATG)",
+ "B11 IDT_10nt_1429 (TCCTCGGATA-CGCGGAGATT)",
+ "C11 IDT_10nt_1444 (AGATCGAAGT-TGGACGTGCT)",
+ "D11 IDT_10nt_1459 (GAACTACACT-GCTTACACTT)",
+ "E11 IDT_10nt_1472 (CGAACTTGAG-TCCACTGCGT)",
+ "F11 IDT_10nt_1485 (GTGGAACAAT-AGACAGGAAT)",
+ "G11 IDT_10nt_1499 (ATTCTTCCGG-GAACGGCCAT)",
+ "H11 IDT_10nt_1513 (ATGAGGTAAC-GCGTACGACA)",
+ "A12 IDT_10nt_1417 (GAAGTATCGT-GCTGCTCTAT)",
+ "B12 IDT_10nt_1430 (AAGACGACGC-TTATGGTGGT)",
+ "C12 IDT_10nt_1445 (CTCAGGACAT-AGGCCTCCAA)",
+ "D12 IDT_10nt_1460 (AACCGAACCT-GTCGATGATT)",
+ "E12 IDT_10nt_1473 (GTGTACCGTT-TTGAGCCTGG)",
+ "F12 IDT_10nt_1486 (CTAAGTTGGT-AGATTCAGCA)",
+ "G12 IDT_10nt_1500 (CCATGCAGTT-GAGACAGAGC)",
+ "H12 IDT_10nt_1514 (TTACTGCCTT-GCTCGGAACT)",
+ ],
+ IndexEnum.IDT_XGEN_UDI: [
+ "A1 xGen UDI Index 1 (CTGATCGT-ATATGCGC)",
+ "B1 xGen UDI Index 2 (ACTCTCGA-TGGTACAG)",
+ "C1 xGen UDI Index 3 (TGAGCTAG-AACCGTTC)",
+ "D1 xGen UDI Index 4 (GAGACGAT-TAACCGGT)",
+ "E1 xGen UDI Index 5 (CTTGTCGA-GAACATCG)",
+ "F1 xGen UDI Index 6 (TTCCAAGG-CCTTGTAG)",
+ "G1 xGen UDI Index 7 (CGCATGAT-TCAGGCTT)",
+ "H1 xGen UDI Index 8 (ACGGAACA-GTTCTCGT)",
+ "A2 xGen UDI Index 9 (CGGCTAAT-AGAACGAG)",
+ "B2 xGen UDI Index 10 (ATCGATCG-TGCTTCCA)",
+ "C2 xGen UDI Index 11 (GCAAGATC-CTTCGACT)",
+ "D2 xGen UDI Index 12 (GCTATCCT-CACCTGTT)",
+ "E2 xGen UDI Index 13 (TACGCTAC-ATCACACG)",
+ "F2 xGen UDI Index 14 (TGGACTCT-CCGTAAGA)",
+ "G2 xGen UDI Index 15 (AGAGTAGC-TACGCCTT)",
+ "H2 xGen UDI Index 16 (ATCCAGAG-CGACGTTA)",
+ "A3 xGen UDI Index 17 (GACGATCT-ATGCACGA)",
+ "B3 xGen UDI Index 18 (AACTGAGC-CCTGATTG)",
+ "C3 xGen UDI Index 19 (CTTAGGAC-GTAGGAGT)",
+ "D3 xGen UDI Index 20 (GTGCCATA-ACTAGGAG)",
+ "E3 xGen UDI Index 21 (GAATCCGA-CACTAGCT)",
+ "F3 xGen UDI Index 22 (TCGCTGTT-ACGACTTG)",
+ "G3 xGen UDI Index 23 (TTCGTTGG-CGTGTGTA)",
+ "H3 xGen UDI Index 24 (AAGCACTG-GTTGACCT)",
+ "A4 xGen UDI Index 25 (CCTTGATC-ACTCCATC)",
+ "B4 xGen UDI Index 26 (GTCGAAGA-CAATGTGG)",
+ "C4 xGen UDI Index 27 (ACCACGAT-TTGCAGAC)",
+ "D4 xGen UDI Index 28 (GATTACCG-CAGTCCAA)",
+ "E4 xGen UDI Index 29 (GCACAACT-ACGTTCAG)",
+ "F4 xGen UDI Index 30 (GCGTCATT-AACGTCTG)",
+ "G4 xGen UDI Index 31 (ATCCGGTA-TATCGGTC)",
+ "H4 xGen UDI Index 32 (CGTTGCAA-CGCTCTAT)",
+ "A5 xGen UDI Index 33 (GTGAAGTG-GATTGCTC)",
+ "B5 xGen UDI Index 34 (CATGGCTA-GATGTGTG)",
+ "C5 xGen UDI Index 35 (ATGCCTGT-CGCAATCT)",
+ "D5 xGen UDI Index 36 (CAACACCT-TGGTAGCT)",
+ "E5 xGen UDI Index 37 (TGTGACTG-GATAGGCT)",
+ "F5 xGen UDI Index 38 (GTCATCGA-AGTGGATC)",
+ "G5 xGen UDI Index 39 (AGCACTTC-TTGGACGT)",
+ "H5 xGen UDI Index 40 (GAAGGAAG-ATGACGTC)",
+ "A6 xGen UDI Index 41 (GTTGTTCG-GAAGTTGG)",
+ "B6 xGen UDI Index 42 (CGGTTGTT-CATACCAC)",
+ "C6 xGen UDI Index 43 (ACTGAGGT-CTGTTGAC)",
+ "D6 xGen UDI Index 44 (TGAAGACG-TGGCATGT)",
+ "E6 xGen UDI Index 45 (GTTACGCA-ATCGCCAT)",
+ "F6 xGen UDI Index 46 (AGCGTGTT-TTGCGAAG)",
+ "G6 xGen UDI Index 47 (GATCGAGT-AGTTCGTC)",
+ "H6 xGen UDI Index 48 (ACAGCTCA-GAGCAGTA)",
+ "A7 xGen UDI Index 49 (GAGCAGTA-ACAGCTCA)",
+ "B7 xGen UDI Index 50 (AGTTCGTC-GATCGAGT)",
+ "C7 xGen UDI Index 51 (TTGCGAAG-AGCGTGTT)",
+ "D7 xGen UDI Index 52 (ATCGCCAT-GTTACGCA)",
+ "E7 xGen UDI Index 53 (TGGCATGT-TGAAGACG)",
+ "F7 xGen UDI Index 54 (CTGTTGAC-ACTGAGGT)",
+ "G7 xGen UDI Index 55 (CATACCAC-CGGTTGTT)",
+ "H7 xGen UDI Index 56 (GAAGTTGG-GTTGTTCG)",
+ "A8 xGen UDI Index 57 (ATGACGTC-GAAGGAAG)",
+ "B8 xGen UDI Index 58 (TTGGACGT-AGCACTTC)",
+ "C8 xGen UDI Index 59 (AGTGGATC-GTCATCGA)",
+ "D8 xGen UDI Index 60 (GATAGGCT-TGTGACTG)",
+ "E8 xGen UDI Index 61 (TGGTAGCT-CAACACCT)",
+ "F8 xGen UDI Index 62 (CGCAATCT-ATGCCTGT)",
+ "G8 xGen UDI Index 63 (GATGTGTG-CATGGCTA)",
+ "H8 xGen UDI Index 64 (GATTGCTC-GTGAAGTG)",
+ "A9 xGen UDI Index 65 (CGCTCTAT-CGTTGCAA)",
+ "B9 xGen UDI Index 66 (TATCGGTC-ATCCGGTA)",
+ "C9 xGen UDI Index 67 (AACGTCTG-GCGTCATT)",
+ "D9 xGen UDI Index 68 (ACGTTCAG-GCACAACT)",
+ "E9 xGen UDI Index 69 (CAGTCCAA-GATTACCG)",
+ "F9 xGen UDI Index 70 (TTGCAGAC-ACCACGAT)",
+ "G9 xGen UDI Index 71 (CAATGTGG-GTCGAAGA)",
+ "H9 xGen UDI Index 72 (ACTCCATC-CCTTGATC)",
+ "A10 xGen UDI Index 73 (GTTGACCT-AAGCACTG)",
+ "B10 xGen UDI Index 74 (CGTGTGTA-TTCGTTGG)",
+ "C10 xGen UDI Index 75 (ACGACTTG-TCGCTGTT)",
+ "D10 xGen UDI Index 76 (CACTAGCT-GAATCCGA)",
+ "E10 xGen UDI Index 77 (ACTAGGAG-GTGCCATA)",
+ "F10 xGen UDI Index 78 (GTAGGAGT-CTTAGGAC)",
+ "G10 xGen UDI Index 79 (CCTGATTG-AACTGAGC)",
+ "H10 xGen UDI Index 80 (ATGCACGA-GACGATCT)",
+ "A11 xGen UDI Index 81 (CGACGTTA-ATCCAGAG)",
+ "B11 xGen UDI Index 82 (TACGCCTT-AGAGTAGC)",
+ "C11 xGen UDI Index 83 (CCGTAAGA-TGGACTCT)",
+ "D11 xGen UDI Index 84 (ATCACACG-TACGCTAC)",
+ "E11 xGen UDI Index 85 (CACCTGTT-GCTATCCT)",
+ "F11 xGen UDI Index 86 (CTTCGACT-GCAAGATC)",
+ "G11 xGen UDI Index 87 (TGCTTCCA-ATCGATCG)",
+ "H11 xGen UDI Index 88 (AGAACGAG-CGGCTAAT)",
+ "A12 xGen UDI Index 89 (GTTCTCGT-ACGGAACA)",
+ "B12 xGen UDI Index 90 (TCAGGCTT-CGCATGAT)",
+ "C12 xGen UDI Index 91 (CCTTGTAG-TTCCAAGG)",
+ "D12 xGen UDI Index 92 (GAACATCG-CTTGTCGA)",
+ "E12 xGen UDI Index 93 (TAACCGGT-GAGACGAT)",
+ "F12 xGen UDI Index 94 (AACCGTTC-TGAGCTAG)",
+ "G12 xGen UDI Index 95 (TGGTACAG-ACTCTCGA)",
+ "H12 xGen UDI Index 96 (ATATGCGC-CTGATCGT)",
+ ],
+ IndexEnum.KAPA_UDI_NIPT: [
+ "A01 UDI0001 (GTAACATC-CAGCGATT)",
+ "B01 UDI0002 (AGGTAAGG-CACGATTC)",
+ "C01 UDI0003 (ACAGGTAT-GCCACCAT)",
+ "D01 UDI0004 (AATGTTCT-AGTCACCT)",
+ "E01 UDI0005 (TCTGCAAG-TTCACCTT)",
+ "F01 UDI0006 (CAGCGGTA-TGACTTGG)",
+ "G01 UDI0007 (CGCCTTCC-GCGGACTT)",
+ "H01 UDI0008 (CAATAGTC-CAGCTCAC)",
+ "A02 UDI0009 (ATTATCAA-CGACTCTC)",
+ "B02 UDI0010 (CCAACATT-GCTCTCTT)",
+ "C02 UDI0011 (GCCTAGCC-TTGGTCTG)",
+ "D02 UDI0012 (GACCAGGA-CTGGCTAT)",
+ "E02 UDI0013 (CTGTAATC-AATTGCTT)",
+ "F02 UDI0014 (ACTAAGAC-TTCCAGCT)",
+ "G02 UDI0015 (TCGCTAGA-AGTACTGC)",
+ "H02 UDI0016 (AACGCATT-GCAGGTTG)",
+ "A03 UDI0017 (TGCTGCTG-GTCCTCAT)",
+ "B03 UDI0018 (TATCTGCC-CCAACGCT)",
+ "C03 UDI0019 (ATTCCTCT-GCGATATT)",
+ "D03 UDI0020 (CAACTCTC-ATCTTCTC)",
+ "E03 UDI0021 (GCCGTCGA-TTAATCAC)",
+ "F03 UDI0022 (TATCCAGG-TCCACTTC)",
+ "G03 UDI0023 (TAAGCACA-GACATTAA)",
+ "H03 UDI0024 (GTCCACAG-CGCGAATA)",
+ "A04 UDI0025 (ACACGATC-AATACCAT)",
+ "B04 UDI0026 (GTATAACA-TGCTTCAC)",
+ "C04 UDI0027 (TGTCGGAT-TCAGGCTT)",
+ "D04 UDI0028 (AGGATCTA-GAACTTCG)",
+ "E04 UDI0029 (AGCAATTC-CTGCTCCT)",
+ "F04 UDI0030 (CCTATGCC-CAAGCTTA)",
+ "G04 UDI0031 (AAGGATGT-CACTTCAT)",
+ "H04 UDI0032 (TTGAGCCT-TCATTCGA)",
+ "A05 UDI0033 (CACATCCT-GCTGCACT)",
+ "B05 UDI0034 (TTCGCTGA-CGCATATT)",
+ "C05 UDI0035 (CATGCTTA-ATGAATTA)",
+ "D05 UDI0036 (AAGTAGAG-ATCGACTG)",
+ "E05 UDI0037 (CATAGCGA-GACGGTTA)",
+ "F05 UDI0038 (AGTTGCTT-TAGCATTG)",
+ "G05 UDI0039 (GCACATCT-AACCTCTT)",
+ "H05 UDI0040 (CCTACCAT-GCTTCCTA)",
+ "A06 UDI0041 (TGCTCGAC-ATCCTTAA)",
+ "B06 UDI0042 (CCAGTTAG-CCTGTCAT)",
+ "C06 UDI0043 (TGTTCCGA-TTAGCCAG)",
+ "D06 UDI0044 (GGTCCAGA-CGGTTCTT)",
+ "E06 UDI0045 (TCGGAATG-CTACATTG)",
+ "F06 UDI0046 (ATAGCGTC-TACTCCAG)",
+ "G06 UDI0047 (AACTTGAC-GCTAGCAG)",
+ "H06 UDI0048 (ATTCTAGG-TTCTTGGC)",
+ "A07 UDI0049 (TTGAATAG-TCCATAAC)",
+ "B07 UDI0050 (TCTGGCGA-AATTCAAC)",
+ "C07 UDI0051 (TAATGAAC-CTTGGCTT)",
+ "D07 UDI0052 (ATTATGTT-CTGTATTC)",
+ "E07 UDI0053 (ATTGTCTG-TTCACAGA)",
+ "F07 UDI0054 (GAAGAAGT-CTATTAGC)",
+ "G07 UDI0055 (GACAGTAA-GCGATTAC)",
+ "H07 UDI0056 (CCTTCGCA-CATCACTT)",
+ "A08 UDI0057 (CATGATCG-TACTCTCC)",
+ "B08 UDI0058 (TCCTTGGT-GAATCGAC)",
+ "C08 UDI0059 (GTCATCTA-TCCAACCA)",
+ "D08 UDI0060 (GAACCTAG-CTGGTATT)",
+ "E08 UDI0061 (CAGCAAGG-CCTCTAAC)",
+ "F08 UDI0062 (CGTTACCA-GAACGCTA)",
+ "G08 UDI0063 (TCCAGCAA-AATTGGCC)",
+ "H08 UDI0064 (CAGGAGCC-GTCCAATC)",
+ "A09 UDI0065 (TTACGCAC-GACCATCT)",
+ "B09 UDI0066 (AGGTTATC-ATCATACC)",
+ "C09 UDI0067 (TCGCCTTG-GCTGATTC)",
+ "D09 UDI0068 (CCAGAGCT-CGAACTTC)",
+ "E09 UDI0069 (TACTTAGC-AGGTACCA)",
+ "F09 UDI0070 (GTCTGATG-ATATCCGA)",
+ "G09 UDI0071 (TCTCGGTC-CTGACATC)",
+ "H09 UDI0072 (AAGACACT-TGACAGCA)",
+ "A10 UDI0073 (CTACCAGG-CAACTGAT)",
+ "B10 UDI0074 (ACTGTATC-TGCTATTA)",
+ "C10 UDI0075 (CTGTGGCG-CACTAGCC)",
+ "D10 UDI0076 (TGTAATCA-AATCTCCA)",
+ "E10 UDI0077 (TTATATCT-GTCTGCAC)",
+ "F10 UDI0078 (GCCGCAAC-TCATGTCT)",
+ "G10 UDI0079 (TGTAACTC-CGACAGTT)",
+ "H10 UDI0080 (CTGCGGAT-GGTTATCT)",
+ "A11 UDI0081 (GACCGTTG-CCATCACA)",
+ "B11 UDI0082 (AACAATGG-TAGTTAGC)",
+ "C11 UDI0083 (AGGTGCGA-CTTCTGGC)",
+ "D11 UDI0084 (AGGTCGCA-GCACAATT)",
+ "E11 UDI0085 (ACCAACTG-GGCAATAC)",
+ "F11 UDI0086 (TGCAAGTA-CCAACTAA)",
+ "G11 UDI0087 (GACCTAAC-GCTCACCA)",
+ "H11 UDI0088 (AGCATGGA-AGCGCTAA)",
+ "A12 UDI0089 (ACAGTTGA-GCTCCGAT)",
+ "B12 UDI0090 (TTGTCTAT-CTTGAATC)",
+ "C12 UDI0091 (CGCTATGT-TCCGCATA)",
+ "D12 UDI0092 (TTAATCAG-CCAATCTG)",
+ "E12 UDI0093 (CTATGCGT-GAATATCA)",
+ "F12 UDI0094 (GATATCCA-GGATTAAC)",
+ "G12 UDI0095 (GAAGGAAG-CATCCTGG)",
+ "H12 UDI0096 (CTAACTCG-TATGGTTC)",
+ ],
+ IndexEnum.NEXTERA_XT: [
+ "N701-S502 (TAAGGCGA-CTCTCTAT)",
+ "N701-S503 (TAAGGCGA-TATCCTCT)",
+ "N701-S505 (TAAGGCGA-GTAAGGAG)",
+ "N701-S506 (TAAGGCGA-ACTGCATA)",
+ "N701-S507 (TAAGGCGA-AAGGAGTA)",
+ "N701-S508 (TAAGGCGA-CTAAGCCT)",
+ "N701-S510 (TAAGGCGA-CGTCTAAT)",
+ "N701-S511 (TAAGGCGA-TCTCTCCG)",
+ "N702-S502 (CGTACTAG-CTCTCTAT)",
+ "N702-S503 (CGTACTAG-TATCCTCT)",
+ "N702-S505 (CGTACTAG-GTAAGGAG)",
+ "N702-S506 (CGTACTAG-ACTGCATA)",
+ "N702-S507 (CGTACTAG-AAGGAGTA)",
+ "N702-S508 (CGTACTAG-CTAAGCCT)",
+ "N702-S510 (CGTACTAG-CGTCTAAT)",
+ "N702-S511 (CGTACTAG-TCTCTCCG)",
+ "N703-S502 (AGGCAGAA-CTCTCTAT)",
+ "N703-S503 (AGGCAGAA-TATCCTCT)",
+ "N703-S505 (AGGCAGAA-GTAAGGAG)",
+ "N703-S506 (AGGCAGAA-ACTGCATA)",
+ "N703-S507 (AGGCAGAA-AAGGAGTA)",
+ "N703-S508 (AGGCAGAA-CTAAGCCT)",
+ "N703-S510 (AGGCAGAA-CGTCTAAT)",
+ "N703-S511 (AGGCAGAA-TCTCTCCG)",
+ "N704-S502 (TCCTGAGC-CTCTCTAT)",
+ "N704-S503 (TCCTGAGC-TATCCTCT)",
+ "N704-S505 (TCCTGAGC-GTAAGGAG)",
+ "N704-S506 (TCCTGAGC-ACTGCATA)",
+ "N704-S507 (TCCTGAGC-AAGGAGTA)",
+ "N704-S508 (TCCTGAGC-CTAAGCCT)",
+ "N704-S510 (TCCTGAGC-CGTCTAAT)",
+ "N704-S511 (TCCTGAGC-TCTCTCCG)",
+ "N705-S502 (GGACTCCT-CTCTCTAT)",
+ "N705-S503 (GGACTCCT-TATCCTCT)",
+ "N705-S505 (GGACTCCT-GTAAGGAG)",
+ "N705-S506 (GGACTCCT-ACTGCATA)",
+ "N705-S507 (GGACTCCT-AAGGAGTA)",
+ "N705-S508 (GGACTCCT-CTAAGCCT)",
+ "N705-S510 (GGACTCCT-CGTCTAAT)",
+ "N705-S511 (GGACTCCT-TCTCTCCG)",
+ "N706-S502 (TAGGCATG-CTCTCTAT)",
+ "N706-S503 (TAGGCATG-TATCCTCT)",
+ "N706-S505 (TAGGCATG-GTAAGGAG)",
+ "N706-S506 (TAGGCATG-ACTGCATA)",
+ "N706-S507 (TAGGCATG-AAGGAGTA)",
+ "N706-S508 (TAGGCATG-CTAAGCCT)",
+ "N706-S510 (TAGGCATG-CGTCTAAT)",
+ "N706-S511 (TAGGCATG-TCTCTCCG)",
+ "N707-S502 (CTCTCTAC-CTCTCTAT)",
+ "N707-S503 (CTCTCTAC-TATCCTCT)",
+ "N707-S505 (CTCTCTAC-GTAAGGAG)",
+ "N707-S506 (CTCTCTAC-ACTGCATA)",
+ "N707-S507 (CTCTCTAC-AAGGAGTA)",
+ "N707-S508 (CTCTCTAC-CTAAGCCT)",
+ "N707-S510 (CTCTCTAC-CGTCTAAT)",
+ "N707-S511 (CTCTCTAC-TCTCTCCG)",
+ "N710-S502 (CGAGGCTG-CTCTCTAT)",
+ "N710-S503 (CGAGGCTG-TATCCTCT)",
+ "N710-S505 (CGAGGCTG-GTAAGGAG)",
+ "N710-S506 (CGAGGCTG-ACTGCATA)",
+ "N710-S507 (CGAGGCTG-AAGGAGTA)",
+ "N710-S508 (CGAGGCTG-CTAAGCCT)",
+ "N710-S510 (CGAGGCTG-CGTCTAAT)",
+ "N710-S511 (CGAGGCTG-TCTCTCCG)",
+ "N711-S502 (AAGAGGCA-CTCTCTAT)",
+ "N711-S503 (AAGAGGCA-TATCCTCT)",
+ "N711-S505 (AAGAGGCA-GTAAGGAG)",
+ "N711-S506 (AAGAGGCA-ACTGCATA)",
+ "N711-S507 (AAGAGGCA-AAGGAGTA)",
+ "N711-S508 (AAGAGGCA-CTAAGCCT)",
+ "N711-S510 (AAGAGGCA-CGTCTAAT)",
+ "N711-S511 (AAGAGGCA-TCTCTCCG)",
+ "N712-S502 (GTAGAGGA-CTCTCTAT)",
+ "N712-S503 (GTAGAGGA-TATCCTCT)",
+ "N712-S505 (GTAGAGGA-GTAAGGAG)",
+ "N712-S506 (GTAGAGGA-ACTGCATA)",
+ "N712-S507 (GTAGAGGA-AAGGAGTA)",
+ "N712-S508 (GTAGAGGA-CTAAGCCT)",
+ "N712-S510 (GTAGAGGA-CGTCTAAT)",
+ "N712-S511 (GTAGAGGA-TCTCTCCG)",
+ "N714-S502 (GCTCATGA-CTCTCTAT)",
+ "N714-S503 (GCTCATGA-TATCCTCT)",
+ "N714-S505 (GCTCATGA-GTAAGGAG)",
+ "N714-S506 (GCTCATGA-ACTGCATA)",
+ "N714-S507 (GCTCATGA-AAGGAGTA)",
+ "N714-S508 (GCTCATGA-CTAAGCCT)",
+ "N714-S510 (GCTCATGA-CGTCTAAT)",
+ "N714-S511 (GCTCATGA-TCTCTCCG)",
+ "N715-S502 (ATCTCAGG-CTCTCTAT)",
+ "N715-S503 (ATCTCAGG-TATCCTCT)",
+ "N715-S505 (ATCTCAGG-GTAAGGAG)",
+ "N715-S506 (ATCTCAGG-ACTGCATA)",
+ "N715-S507 (ATCTCAGG-AAGGAGTA)",
+ "N715-S508 (ATCTCAGG-CTAAGCCT)",
+ "N715-S510 (ATCTCAGG-CGTCTAAT)",
+ "N715-S511 (ATCTCAGG-TCTCTCCG)",
+ "N701-S513 (TAAGGCGA-TCGACTAG)",
+ "N701-S515 (TAAGGCGA-TTCTAGCT)",
+ "N701-S516 (TAAGGCGA-CCTAGAGT)",
+ "N701-S517 (TAAGGCGA-GCGTAAGA)",
+ "N701-S518 (TAAGGCGA-CTATTAAG)",
+ "N701-S520 (TAAGGCGA-AAGGCTAT)",
+ "N701-S521 (TAAGGCGA-GAGCCTTA)",
+ "N701-S522 (TAAGGCGA-TTATGCGA)",
+ "N702-S513 (CGTACTAG-TCGACTAG)",
+ "N702-S515 (CGTACTAG-TTCTAGCT)",
+ "N702-S516 (CGTACTAG-CCTAGAGT)",
+ "N702-S517 (CGTACTAG-GCGTAAGA)",
+ "N702-S518 (CGTACTAG-CTATTAAG)",
+ "N702-S520 (CGTACTAG-AAGGCTAT)",
+ "N702-S521 (CGTACTAG-GAGCCTTA)",
+ "N702-S522 (CGTACTAG-TTATGCGA)",
+ "N703-S513 (AGGCAGAA-TCGACTAG)",
+ "N703-S515 (AGGCAGAA-TTCTAGCT)",
+ "N703-S516 (AGGCAGAA-CCTAGAGT)",
+ "N703-S517 (AGGCAGAA-GCGTAAGA)",
+ "N703-S518 (AGGCAGAA-CTATTAAG)",
+ "N703-S520 (AGGCAGAA-AAGGCTAT)",
+ "N703-S521 (AGGCAGAA-GAGCCTTA)",
+ "N703-S522 (AGGCAGAA-TTATGCGA)",
+ "N704-S513 (TCCTGAGC-TCGACTAG)",
+ "N704-S515 (TCCTGAGC-TTCTAGCT)",
+ "N704-S516 (TCCTGAGC-CCTAGAGT)",
+ "N704-S517 (TCCTGAGC-GCGTAAGA)",
+ "N704-S518 (TCCTGAGC-CTATTAAG)",
+ "N704-S520 (TCCTGAGC-AAGGCTAT)",
+ "N704-S521 (TCCTGAGC-GAGCCTTA)",
+ "N704-S522 (TCCTGAGC-TTATGCGA)",
+ "N705-S513 (GGACTCCT-TCGACTAG)",
+ "N705-S515 (GGACTCCT-TTCTAGCT)",
+ "N705-S516 (GGACTCCT-CCTAGAGT)",
+ "N705-S517 (GGACTCCT-GCGTAAGA)",
+ "N705-S518 (GGACTCCT-CTATTAAG)",
+ "N705-S520 (GGACTCCT-AAGGCTAT)",
+ "N705-S521 (GGACTCCT-GAGCCTTA)",
+ "N705-S522 (GGACTCCT-TTATGCGA)",
+ "N706-S513 (TAGGCATG-TCGACTAG)",
+ "N706-S515 (TAGGCATG-TTCTAGCT)",
+ "N706-S516 (TAGGCATG-CCTAGAGT)",
+ "N706-S517 (TAGGCATG-GCGTAAGA)",
+ "N706-S518 (TAGGCATG-CTATTAAG)",
+ "N706-S520 (TAGGCATG-AAGGCTAT)",
+ "N706-S521 (TAGGCATG-GAGCCTTA)",
+ "N706-S522 (TAGGCATG-TTATGCGA)",
+ "N707-S513 (CTCTCTAC-TCGACTAG)",
+ "N707-S515 (CTCTCTAC-TTCTAGCT)",
+ "N707-S516 (CTCTCTAC-CCTAGAGT)",
+ "N707-S517 (CTCTCTAC-GCGTAAGA)",
+ "N707-S518 (CTCTCTAC-CTATTAAG)",
+ "N707-S520 (CTCTCTAC-AAGGCTAT)",
+ "N707-S521 (CTCTCTAC-GAGCCTTA)",
+ "N707-S522 (CTCTCTAC-TTATGCGA)",
+ "N710-S513 (CGAGGCTG-TCGACTAG)",
+ "N710-S515 (CGAGGCTG-TTCTAGCT)",
+ "N710-S516 (CGAGGCTG-CCTAGAGT)",
+ "N710-S517 (CGAGGCTG-GCGTAAGA)",
+ "N710-S518 (CGAGGCTG-CTATTAAG)",
+ "N710-S520 (CGAGGCTG-AAGGCTAT)",
+ "N710-S521 (CGAGGCTG-GAGCCTTA)",
+ "N710-S522 (CGAGGCTG-TTATGCGA)",
+ "N711-S513 (AAGAGGCA-TCGACTAG)",
+ "N711-S515 (AAGAGGCA-TTCTAGCT)",
+ "N711-S516 (AAGAGGCA-CCTAGAGT)",
+ "N711-S517 (AAGAGGCA-GCGTAAGA)",
+ "N711-S518 (AAGAGGCA-CTATTAAG)",
+ "N711-S520 (AAGAGGCA-AAGGCTAT)",
+ "N711-S521 (AAGAGGCA-GAGCCTTA)",
+ "N711-S522 (AAGAGGCA-TTATGCGA)",
+ "N712-S513 (GTAGAGGA-TCGACTAG)",
+ "N712-S515 (GTAGAGGA-TTCTAGCT)",
+ "N712-S516 (GTAGAGGA-CCTAGAGT)",
+ "N712-S517 (GTAGAGGA-GCGTAAGA)",
+ "N712-S518 (GTAGAGGA-CTATTAAG)",
+ "N712-S520 (GTAGAGGA-AAGGCTAT)",
+ "N712-S521 (GTAGAGGA-GAGCCTTA)",
+ "N712-S522 (GTAGAGGA-TTATGCGA)",
+ "N714-S513 (GCTCATGA-TCGACTAG)",
+ "N714-S515 (GCTCATGA-TTCTAGCT)",
+ "N714-S516 (GCTCATGA-CCTAGAGT)",
+ "N714-S517 (GCTCATGA-GCGTAAGA)",
+ "N714-S518 (GCTCATGA-CTATTAAG)",
+ "N714-S520 (GCTCATGA-AAGGCTAT)",
+ "N714-S521 (GCTCATGA-GAGCCTTA)",
+ "N714-S522 (GCTCATGA-TTATGCGA)",
+ "N715-S513 (ATCTCAGG-TCGACTAG)",
+ "N715-S515 (ATCTCAGG-TTCTAGCT)",
+ "N715-S516 (ATCTCAGG-CCTAGAGT)",
+ "N715-S517 (ATCTCAGG-GCGTAAGA)",
+ "N715-S518 (ATCTCAGG-CTATTAAG)",
+ "N715-S520 (ATCTCAGG-AAGGCTAT)",
+ "N715-S521 (ATCTCAGG-GAGCCTTA)",
+ "N715-S522 (ATCTCAGG-TTATGCGA)",
+ "N716-S502 (ACTCGCTA-CTCTCTAT)",
+ "N716-S503 (ACTCGCTA-TATCCTCT)",
+ "N716-S505 (ACTCGCTA-GTAAGGAG)",
+ "N716-S506 (ACTCGCTA-ACTGCATA)",
+ "N716-S507 (ACTCGCTA-AAGGAGTA)",
+ "N716-S508 (ACTCGCTA-CTAAGCCT)",
+ "N716-S510 (ACTCGCTA-CGTCTAAT)",
+ "N716-S511 (ACTCGCTA-TCTCTCCG)",
+ "N718-S502 (GGAGCTAC-CTCTCTAT)",
+ "N718-S503 (GGAGCTAC-TATCCTCT)",
+ "N718-S505 (GGAGCTAC-GTAAGGAG)",
+ "N718-S506 (GGAGCTAC-ACTGCATA)",
+ "N718-S507 (GGAGCTAC-AAGGAGTA)",
+ "N718-S508 (GGAGCTAC-CTAAGCCT)",
+ "N718-S510 (GGAGCTAC-CGTCTAAT)",
+ "N718-S511 (GGAGCTAC-TCTCTCCG)",
+ "N719-S502 (GCGTAGTA-CTCTCTAT)",
+ "N719-S503 (GCGTAGTA-TATCCTCT)",
+ "N719-S505 (GCGTAGTA-GTAAGGAG)",
+ "N719-S506 (GCGTAGTA-ACTGCATA)",
+ "N719-S507 (GCGTAGTA-AAGGAGTA)",
+ "N719-S508 (GCGTAGTA-CTAAGCCT)",
+ "N719-S510 (GCGTAGTA-CGTCTAAT)",
+ "N719-S511 (GCGTAGTA-TCTCTCCG)",
+ "N720-S502 (CGGAGCCT-CTCTCTAT)",
+ "N720-S503 (CGGAGCCT-TATCCTCT)",
+ "N720-S505 (CGGAGCCT-GTAAGGAG)",
+ "N720-S506 (CGGAGCCT-ACTGCATA)",
+ "N720-S507 (CGGAGCCT-AAGGAGTA)",
+ "N720-S508 (CGGAGCCT-CTAAGCCT)",
+ "N720-S510 (CGGAGCCT-CGTCTAAT)",
+ "N720-S511 (CGGAGCCT-TCTCTCCG)",
+ "N721-S502 (TACGCTGC-CTCTCTAT)",
+ "N721-S503 (TACGCTGC-TATCCTCT)",
+ "N721-S505 (TACGCTGC-GTAAGGAG)",
+ "N721-S506 (TACGCTGC-ACTGCATA)",
+ "N721-S507 (TACGCTGC-AAGGAGTA)",
+ "N721-S508 (TACGCTGC-CTAAGCCT)",
+ "N721-S510 (TACGCTGC-CGTCTAAT)",
+ "N721-S511 (TACGCTGC-TCTCTCCG)",
+ "N722-S502 (ATGCGCAG-CTCTCTAT)",
+ "N722-S503 (ATGCGCAG-TATCCTCT)",
+ "N722-S505 (ATGCGCAG-GTAAGGAG)",
+ "N722-S506 (ATGCGCAG-ACTGCATA)",
+ "N722-S507 (ATGCGCAG-AAGGAGTA)",
+ "N722-S508 (ATGCGCAG-CTAAGCCT)",
+ "N722-S510 (ATGCGCAG-CGTCTAAT)",
+ "N722-S511 (ATGCGCAG-TCTCTCCG)",
+ "N723-S502 (TAGCGCTC-CTCTCTAT)",
+ "N723-S503 (TAGCGCTC-TATCCTCT)",
+ "N723-S505 (TAGCGCTC-GTAAGGAG)",
+ "N723-S506 (TAGCGCTC-ACTGCATA)",
+ "N723-S507 (TAGCGCTC-AAGGAGTA)",
+ "N723-S508 (TAGCGCTC-CTAAGCCT)",
+ "N723-S510 (TAGCGCTC-CGTCTAAT)",
+ "N723-S511 (TAGCGCTC-TCTCTCCG)",
+ "N724-S502 (ACTGAGCG-CTCTCTAT)",
+ "N724-S503 (ACTGAGCG-TATCCTCT)",
+ "N724-S505 (ACTGAGCG-GTAAGGAG)",
+ "N724-S506 (ACTGAGCG-ACTGCATA)",
+ "N724-S507 (ACTGAGCG-AAGGAGTA)",
+ "N724-S508 (ACTGAGCG-CTAAGCCT)",
+ "N724-S510 (ACTGAGCG-CGTCTAAT)",
+ "N724-S511 (ACTGAGCG-TCTCTCCG)",
+ "N726-S502 (CCTAAGAC-CTCTCTAT)",
+ "N726-S503 (CCTAAGAC-TATCCTCT)",
+ "N726-S505 (CCTAAGAC-GTAAGGAG)",
+ "N726-S506 (CCTAAGAC-ACTGCATA)",
+ "N726-S507 (CCTAAGAC-AAGGAGTA)",
+ "N726-S508 (CCTAAGAC-CTAAGCCT)",
+ "N726-S510 (CCTAAGAC-CGTCTAAT)",
+ "N726-S511 (CCTAAGAC-TCTCTCCG)",
+ "N727-S502 (CGATCAGT-CTCTCTAT)",
+ "N727-S503 (CGATCAGT-TATCCTCT)",
+ "N727-S505 (CGATCAGT-GTAAGGAG)",
+ "N727-S506 (CGATCAGT-ACTGCATA)",
+ "N727-S507 (CGATCAGT-AAGGAGTA)",
+ "N727-S508 (CGATCAGT-CTAAGCCT)",
+ "N727-S510 (CGATCAGT-CGTCTAAT)",
+ "N727-S511 (CGATCAGT-TCTCTCCG)",
+ "N728-S502 (TGCAGCTA-CTCTCTAT)",
+ "N728-S503 (TGCAGCTA-TATCCTCT)",
+ "N728-S505 (TGCAGCTA-GTAAGGAG)",
+ "N728-S506 (TGCAGCTA-ACTGCATA)",
+ "N728-S507 (TGCAGCTA-AAGGAGTA)",
+ "N728-S508 (TGCAGCTA-CTAAGCCT)",
+ "N728-S510 (TGCAGCTA-CGTCTAAT)",
+ "N728-S511 (TGCAGCTA-TCTCTCCG)",
+ "N729-S502 (TCGACGTC-CTCTCTAT)",
+ "N729-S503 (TCGACGTC-TATCCTCT)",
+ "N729-S505 (TCGACGTC-GTAAGGAG)",
+ "N729-S506 (TCGACGTC-ACTGCATA)",
+ "N729-S507 (TCGACGTC-AAGGAGTA)",
+ "N729-S508 (TCGACGTC-CTAAGCCT)",
+ "N729-S510 (TCGACGTC-CGTCTAAT)",
+ "N729-S511 (TCGACGTC-TCTCTCCG)",
+ "N716-S513 (ACTCGCTA-TCGACTAG)",
+ "N716-S515 (ACTCGCTA-TTCTAGCT)",
+ "N716-S516 (ACTCGCTA-CCTAGAGT)",
+ "N716-S517 (ACTCGCTA-GCGTAAGA)",
+ "N716-S518 (ACTCGCTA-CTATTAAG)",
+ "N716-S520 (ACTCGCTA-AAGGCTAT)",
+ "N716-S521 (ACTCGCTA-GAGCCTTA)",
+ "N716-S522 (ACTCGCTA-TTATGCGA)",
+ "N718-S513 (GGAGCTAC-TCGACTAG)",
+ "N718-S515 (GGAGCTAC-TTCTAGCT)",
+ "N718-S516 (GGAGCTAC-CCTAGAGT)",
+ "N718-S517 (GGAGCTAC-GCGTAAGA)",
+ "N718-S518 (GGAGCTAC-CTATTAAG)",
+ "N718-S520 (GGAGCTAC-AAGGCTAT)",
+ "N718-S521 (GGAGCTAC-GAGCCTTA)",
+ "N718-S522 (GGAGCTAC-TTATGCGA)",
+ "N719-S513 (GCGTAGTA-TCGACTAG)",
+ "N719-S515 (GCGTAGTA-TTCTAGCT)",
+ "N719-S516 (GCGTAGTA-CCTAGAGT)",
+ "N719-S517 (GCGTAGTA-GCGTAAGA)",
+ "N719-S518 (GCGTAGTA-CTATTAAG)",
+ "N719-S520 (GCGTAGTA-AAGGCTAT)",
+ "N719-S521 (GCGTAGTA-GAGCCTTA)",
+ "N719-S522 (GCGTAGTA-TTATGCGA)",
+ "N720-S513 (CGGAGCCT-TCGACTAG)",
+ "N720-S515 (CGGAGCCT-TTCTAGCT)",
+ "N720-S516 (CGGAGCCT-CCTAGAGT)",
+ "N720-S517 (CGGAGCCT-GCGTAAGA)",
+ "N720-S518 (CGGAGCCT-CTATTAAG)",
+ "N720-S520 (CGGAGCCT-AAGGCTAT)",
+ "N720-S521 (CGGAGCCT-GAGCCTTA)",
+ "N720-S522 (CGGAGCCT-TTATGCGA)",
+ "N721-S513 (TACGCTGC-TCGACTAG)",
+ "N721-S515 (TACGCTGC-TTCTAGCT)",
+ "N721-S516 (TACGCTGC-CCTAGAGT)",
+ "N721-S517 (TACGCTGC-GCGTAAGA)",
+ "N721-S518 (TACGCTGC-CTATTAAG)",
+ "N721-S520 (TACGCTGC-AAGGCTAT)",
+ "N721-S521 (TACGCTGC-GAGCCTTA)",
+ "N721-S522 (TACGCTGC-TTATGCGA)",
+ "N722-S513 (ATGCGCAG-TCGACTAG)",
+ "N722-S515 (ATGCGCAG-TTCTAGCT)",
+ "N722-S516 (ATGCGCAG-CCTAGAGT)",
+ "N722-S517 (ATGCGCAG-GCGTAAGA)",
+ "N722-S518 (ATGCGCAG-CTATTAAG)",
+ "N722-S520 (ATGCGCAG-AAGGCTAT)",
+ "N722-S521 (ATGCGCAG-GAGCCTTA)",
+ "N722-S522 (ATGCGCAG-TTATGCGA)",
+ "N723-S513 (TAGCGCTC-TCGACTAG)",
+ "N723-S515 (TAGCGCTC-TTCTAGCT)",
+ "N723-S516 (TAGCGCTC-CCTAGAGT)",
+ "N723-S517 (TAGCGCTC-GCGTAAGA)",
+ "N723-S518 (TAGCGCTC-CTATTAAG)",
+ "N723-S520 (TAGCGCTC-AAGGCTAT)",
+ "N723-S521 (TAGCGCTC-GAGCCTTA)",
+ "N723-S522 (TAGCGCTC-TTATGCGA)",
+ "N724-S513 (ACTGAGCG-TCGACTAG)",
+ "N724-S515 (ACTGAGCG-TTCTAGCT)",
+ "N724-S516 (ACTGAGCG-CCTAGAGT)",
+ "N724-S517 (ACTGAGCG-GCGTAAGA)",
+ "N724-S518 (ACTGAGCG-CTATTAAG)",
+ "N724-S520 (ACTGAGCG-AAGGCTAT)",
+ "N724-S521 (ACTGAGCG-GAGCCTTA)",
+ "N724-S522 (ACTGAGCG-TTATGCGA)",
+ "N726-S513 (CCTAAGAC-TCGACTAG)",
+ "N726-S515 (CCTAAGAC-TTCTAGCT)",
+ "N726-S516 (CCTAAGAC-CCTAGAGT)",
+ "N726-S517 (CCTAAGAC-GCGTAAGA)",
+ "N726-S518 (CCTAAGAC-CTATTAAG)",
+ "N726-S520 (CCTAAGAC-AAGGCTAT)",
+ "N726-S521 (CCTAAGAC-GAGCCTTA)",
+ "N726-S522 (CCTAAGAC-TTATGCGA)",
+ "N727-S513 (CGATCAGT-TCGACTAG)",
+ "N727-S515 (CGATCAGT-TTCTAGCT)",
+ "N727-S516 (CGATCAGT-CCTAGAGT)",
+ "N727-S517 (CGATCAGT-GCGTAAGA)",
+ "N727-S518 (CGATCAGT-CTATTAAG)",
+ "N727-S520 (CGATCAGT-AAGGCTAT)",
+ "N727-S521 (CGATCAGT-GAGCCTTA)",
+ "N727-S522 (CGATCAGT-TTATGCGA)",
+ "N728-S513 (TGCAGCTA-TCGACTAG)",
+ "N728-S515 (TGCAGCTA-TTCTAGCT)",
+ "N728-S516 (TGCAGCTA-CCTAGAGT)",
+ "N728-S517 (TGCAGCTA-GCGTAAGA)",
+ "N728-S518 (TGCAGCTA-CTATTAAG)",
+ "N728-S520 (TGCAGCTA-AAGGCTAT)",
+ "N728-S521 (TGCAGCTA-GAGCCTTA)",
+ "N728-S522 (TGCAGCTA-TTATGCGA)",
+ "N729-S513 (TCGACGTC-TCGACTAG)",
+ "N729-S515 (TCGACGTC-TTCTAGCT)",
+ "N729-S516 (TCGACGTC-CCTAGAGT)",
+ "N729-S517 (TCGACGTC-GCGTAAGA)",
+ "N729-S518 (TCGACGTC-CTATTAAG)",
+ "N729-S520 (TCGACGTC-AAGGCTAT)",
+ "N729-S521 (TCGACGTC-GAGCCTTA)",
+ "N729-S522 (TCGACGTC-TTATGCGA)",
+ ],
+ IndexEnum.NEXTFLEX_UDI_96: [
+ "UDI1 (AATCGTTA-AATAACGT)",
+ "UDI2 (GTCTACAT-TTCTTGAA)",
+ "UDI3 (CGCTGCTC-GGCAGATC)",
+ "UDI4 (GATCAACA-CTATGTTA)",
+ "UDI5 (CGAAGGAC-GTTGACGC)",
+ "UDI6 (GATGCCGG-ATCTACGA)",
+ "UDI7 (CTACGAAG-CTCGACAG)",
+ "UDI8 (GATGCGTC-GAGGCTGC)",
+ "UDI9 (CTACGGCA-CCTCGTAG)",
+ "UDI10 (GATTCCTT-CATAGGCA)",
+ "UDI11 (CTACTCGA-AGATGAAC)",
+ "UDI12 (GATTCGAG-CCGAGTAT)",
+ "UDI13 (AATCGGCG-AATATTGA)",
+ "UDI14 (TTCGCCGA-GTATACCG)",
+ "UDI15 (CTGGCCTC-GATCCAAC)",
+ "UDI16 (GAACTTAT-AGATACGC)",
+ "UDI17 (CGTATTGG-GGTATCTT)",
+ "UDI18 (GAAGCACA-CCTCTGGC)",
+ "UDI19 (CTTAATAC-CCATTGTG)",
+ "UDI20 (GAAGTCTT-ACTACGGT)",
+ "UDI21 (GAAGAGGC-AAGTGCTA)",
+ "UDI22 (CGGATAAC-GCCGAACG)",
+ "UDI23 (GAATCTGG-TGTCCACG)",
+ "UDI24 (CTGATTGA-GACACACT)",
+ "UDI25 (AATCCGTT-AATATGCT)",
+ "UDI26 (TGCGTACA-TTCTCATA)",
+ "UDI27 (GAATCAAT-TCTGTGAT)",
+ "UDI28 (TGAGTCAG-CCGAACTT)",
+ "UDI29 (GAATGCTC-GTCTAACA)",
+ "UDI30 (GAATATCC-GACGCCAT)",
+ "UDI31 (CTTATGAA-GCCAATGT)",
+ "UDI32 (TCGGCACC-CCAACGTC)",
+ "UDI33 (AAGAAGCG-GTAGATAA)",
+ "UDI34 (CTCACGAT-CTTACGGC)",
+ "UDI35 (TCGGTCGA-CCAAGTGC)",
+ "UDI36 (TCGGTAAG-CTAACTCA)",
+ "UDI37 (AAGATACA-AATATCTG)",
+ "UDI38 (GTCGCTGT-TTATATCA)",
+ "UDI39 (TCGGATGT-CTGCGGAT)",
+ "UDI40 (CGAGCCGG-GCGGCTTG)",
+ "UDI41 (CGATTATC-GAGTTGAT)",
+ "UDI42 (TCGAAGCT-GCACTGAG)",
+ "UDI43 (CTATCATT-GACCACCT)",
+ "UDI44 (CGCGCCAA-TGGCTAGG)",
+ "UDI45 (CGAACGGA-CCTACCGG)",
+ "UDI46 (CTACTGAC-GGAGGATG)",
+ "UDI47 (TCTTAAGT-CGCTGAAT)",
+ "UDI48 (TTAGAGTC-TGTGACGA)",
+ "UDI49 (AAGACGAA-AATAGATT)",
+ "UDI50 (TTATTATG-TTAGCGCA)",
+ "UDI51 (CGCTATTA-GCGGCCGT)",
+ "UDI52 (TCTATCAG-CAGTAACC)",
+ "UDI53 (CGGTGGTA-GCCTAGTA)",
+ "UDI54 (TCACCAAT-CACGGCGC)",
+ "UDI55 (CTGGAAGC-GGTGCAGA)",
+ "UDI56 (CGTAAGAG-TCGCTGAC)",
+ "UDI57 (AAGAGAGC-CAGCCAGT)",
+ "UDI58 (TCAACGAG-CGTCAACC)",
+ "UDI59 (TGCGAGAC-GCCGGCGA)",
+ "UDI60 (CCTGGTGT-GCCTCCGG)",
+ "UDI61 (AAGTAAGT-AATAGTCC)",
+ "UDI62 (TGACTGAA-TTAGACGT)",
+ "UDI63 (AAGACTGT-GTGGACTA)",
+ "UDI64 (CAATGATG-CACGGACG)",
+ "UDI65 (CACAGTAA-CACTAGAG)",
+ "UDI66 (TGGTCATT-GCAGATGG)",
+ "UDI67 (CAACCGTG-CTCTCACG)",
+ "UDI68 (TGGTGCAC-GGAATCAC)",
+ "UDI69 (CCACAATG-CGTTGACG)",
+ "UDI70 (TGTGTGCC-CATCAGGT)",
+ "UDI71 (CACCACGG-CGTTGTAA)",
+ "UDI72 (TGTGTTAA-GGCACGGT)",
+ "UDI73 (AAGTTATC-AATAGCAA)",
+ "UDI74 (GTACAGCT-TGATCGGT)",
+ "UDI75 (CAACTGCT-AGTAGTAT)",
+ "UDI76 (CATGATGA-GTTAGAGG)",
+ "UDI77 (TGACTACT-CCTTACAG)",
+ "UDI78 (CAGAAGAT-GTACATTG)",
+ "UDI79 (TGAGGCGC-GGAGACCA)",
+ "UDI80 (CAGGTTCC-CGAACACC)",
+ "UDI81 (TGAACAGG-GAGAACAA)",
+ "UDI82 (CAGTGTGG-TGTGAATC)",
+ "UDI83 (TTCCACCA-GGTTAAGG)",
+ "UDI84 (CCGCTGTT-AGACCGCA)",
+ "UDI85 (AAGTTGGA-AATACAGG)",
+ "UDI86 (GGACAACG-TGATGGCC)",
+ "UDI87 (TTCGAACC-TGTCACCT)",
+ "UDI88 (CAGACCAC-GCTTCGGC)",
+ "UDI89 (TTCTGGTG-CCAGTGGT)",
+ "UDI90 (CAATCGAA-GCACACGC)",
+ "UDI91 (AAGTACAG-GTCACGTC)",
+ "UDI92 (CCGTGCCA-GCAGCTCC)",
+ "UDI93 (CATTGCAC-CATGCAGC)",
+ "UDI94 (TTACCTGG-ACGATTGC)",
+ "UDI95 (CTGCAACG-GACATTCG)",
+ "UDI96 (TACTGTTA-GCGAATAC)",
+ ],
+ IndexEnum.NEXTFLEX_V2_UDI_96: [
+ "UDI 1 (AATCGTTA-AATAACGT)",
+ "UDI 2 (GTCTACAT-TTCTTGAA)",
+ "UDI 3 (CGCTGCTC-GGCAGATC)",
+ "UDI 4 (GATCAACA-CTATGTTA)",
+ "UDI 5 (CGAAGGAC-GTTGACGC)",
+ "UDI 6 (GATGCCGG-ATCTACGA)",
+ "UDI 7 (CTACGAAG-CTCGACAG)",
+ "UDI 8 (GATGCGTC-GAGGCTGC)",
+ "UDI 9 (CTACGGCA-CCTCGTAG)",
+ "UDI 10 (GATTCCTT-CATAGGCA)",
+ "UDI 11 (CTACTCGA-AGATGAAC)",
+ "UDI 12 (GATTCGAG-CCGAGTAT)",
+ "UDI 13 (AATCGGCG-AATATTGA)",
+ "UDI 14 (TTCGCCGA-GTATACCG)",
+ "UDI 15 (CTGGCCTC-GATCCAAC)",
+ "UDI 16 (GAACTTAT-AGATACGC)",
+ "UDI 17 (CGTATTGG-GGTATCTT)",
+ "UDI 18 (GAAGCACA-CCTCTGGC)",
+ "UDI 19 (CTTAATAC-CCATTGTG)",
+ "UDI 20 (GAAGTCTT-ACTACGGT)",
+ "UDI 21 (GAAGAGGC-AAGTGCTA)",
+ "UDI 22 (CGGATAAC-GCCGAACG)",
+ "UDI 23 (GAATCTGG-TGTCCACG)",
+ "UDI 24 (CTGATTGA-GACACACT)",
+ "UDI 25 (AATCCGTT-AATATGCT)",
+ "UDI 26 (TGCGTACA-TTCTCATA)",
+ "UDI 27 (GAATCAAT-TCTGTGAT)",
+ "UDI 28 (TGAGTCAG-CCGAACTT)",
+ "UDI 29 (GAATGCTC-GTCTAACA)",
+ "UDI 30 (GAATATCC-GACGCCAT)",
+ "UDI 31 (CTTATGAA-GCCAATGT)",
+ "UDI 32 (TCGGCACC-CCAACGTC)",
+ "UDI 33 (AAGAAGCG-GTAGATAA)",
+ "UDI 34 (CTCACGAT-CTTACGGC)",
+ "UDI 35 (TCGGTCGA-CCAAGTGC)",
+ "UDI 36 (TCGGTAAG-CTAACTCA)",
+ "UDI 37 (AAGATACA-AATATCTG)",
+ "UDI 38 (GTCGCTGT-TTATATCA)",
+ "UDI 39 (TCGGATGT-CTGCGGAT)",
+ "UDI 40 (CGAGCCGG-GCGGCTTG)",
+ "UDI 41 (CGATTATC-GAGTTGAT)",
+ "UDI 42 (TCGAAGCT-GCACTGAG)",
+ "UDI 43 (CTATCATT-GACCACCT)",
+ "UDI 44 (CGCGCCAA-TGGCTAGG)",
+ "UDI 45 (CGAACGGA-CCTACCGG)",
+ "UDI 46 (CTACTGAC-GGAGGATG)",
+ "UDI 47 (TCTTAAGT-CGCTGAAT)",
+ "UDI 48 (TTAGAGTC-TGTGACGA)",
+ "UDI 49 (AAGACGAA-AATAGATT)",
+ "UDI 50 (TTATTATG-TTAGCGCA)",
+ "UDI 51 (CGCTATTA-GCGGCCGT)",
+ "UDI 52 (TCTATCAG-CAGTAACC)",
+ "UDI 53 (CGGTGGTA-GCCTAGTA)",
+ "UDI 54 (TCACCAAT-CACGGCGC)",
+ "UDI 55 (CTGGAAGC-GGTGCAGA)",
+ "UDI 56 (TCCTCGAT-GTAACTGC)",
+ "UDI 57 (AAGAGAGC-CAGCCAGT)",
+ "UDI 58 (TCAACGAG-CGTCAACC)",
+ "UDI 59 (TGCGAGAC-GCCGGCGA)",
+ "UDI 60 (CCTGGTGT-GCCTCCGG)",
+ "UDI 61 (AAGTAAGT-AATAGTCC)",
+ "UDI 62 (TGACTGAA-TTAGACGT)",
+ "UDI 63 (AAGACTGT-GTGGACTA)",
+ "UDI 64 (CAATGATG-CACGGACG)",
+ "UDI 65 (CACAGTAA-CACTAGAG)",
+ "UDI 66 (TGGTCATT-GCAGATGG)",
+ "UDI 67 (CAACCGTG-CTCTCACG)",
+ "UDI 68 (TGGTGCAC-GGAATCAC)",
+ "UDI 69 (CCACAATG-CGTTGACG)",
+ "UDI 70 (TGTGTGCC-CATCAGGT)",
+ "UDI 71 (CACCACGG-CGTTGTAA)",
+ "UDI 72 (TGTGTTAA-GGCACGGT)",
+ "UDI 73 (AAGTTATC-AATAGCAA)",
+ "UDI 74 (GTACAGCT-TGATCGGT)",
+ "UDI 75 (CAACTGCT-AGTAGTAT)",
+ "UDI 76 (CATGATGA-GTTAGAGG)",
+ "UDI 77 (TGACTACT-CCTTACAG)",
+ "UDI 78 (CAGAAGAT-GTACATTG)",
+ "UDI 79 (TGAGGCGC-GGAGACCA)",
+ "UDI 80 (CAGGTTCC-CGAACACC)",
+ "UDI 81 (TGAACAGG-GAGAACAA)",
+ "UDI 82 (CAGTGTGG-TGTGAATC)",
+ "UDI 83 (TTCCACCA-GGTTAAGG)",
+ "UDI 84 (CCGCTGTT-AGACCGCA)",
+ "UDI 85 (AAGTTGGA-AATACAGG)",
+ "UDI 86 (GGACAACG-TGATGGCC)",
+ "UDI 87 (TTCGAACC-TGTCACCT)",
+ "UDI 88 (CAGACCAC-GCTTCGGC)",
+ "UDI 89 (TTCTGGTG-CCAGTGGT)",
+ "UDI 90 (CAATCGAA-GCACACGC)",
+ "UDI 91 (AAGTACAG-GTCACGTC)",
+ "UDI 92 (CCGTGCCA-GCAGCTCC)",
+ "UDI 93 (CATTGCAC-CATGCAGC)",
+ "UDI 94 (TTACCTGG-ACGATTGC)",
+ "UDI 95 (CTGCAACG-GACATTCG)",
+ "UDI 96 (TACTGTTA-GCGAATAC)",
+ ],
+ IndexEnum.NO_INDEX: [],
+ IndexEnum.TEN_X_TN_A: [
+ "SI_TN_A1 (AGTATCTGCA-TCGCTAGCGA)",
+ "SI_TN_B1 (CATAGCATGA-GACCTGCCTG)",
+ "SI_TN_C1 (AAGGGTTTAC-TACTCACGCG)",
+ "SI_TN_D1 (AGCGCCTTGC-GTACGAAGTG)",
+ "SI_TN_E1 (GTCATCCTAT-CTAGGGCAAA)",
+ "SI_TN_F1 (CCTGGCTATA-CCCTCACAAA)",
+ "SI_TN_G1 (TCATCGTTCT-AATTCGGGAA)",
+ "SI_TN_H1 (CGTTCCACAT-GAGGGAGCCA)",
+ "SI_TN_A2 (TCTATGAGTG-CAACCAACGA)",
+ "SI_TN_B2 (TACTGCAATA-AGAGTCCATG)",
+ "SI_TN_C2 (GCTTAAGCAA-GCCTACCGAA)",
+ "SI_TN_D2 (CAGTAATACA-TTGAGCTGAG)",
+ "SI_TN_E2 (CGACCCAGTG-TCGCACCAAC)",
+ "SI_TN_F2 (ACTTGTTCGA-TGGATGGGTG)",
+ "SI_TN_G2 (GCACTACTGA-CACTACGGTT)",
+ "SI_TN_H2 (ATCTTGCAGC-CGAGTAAGGA)",
+ "SI_TN_A3 (TTATTGACAC-GCGAACTGAT)",
+ "SI_TN_B3 (TGGCTACCGG-CTGAGTCATT)",
+ "SI_TN_C3 (GTGATCTGGG-ACTGTGTCGC)",
+ "SI_TN_D3 (ATAGAACCAC-CTAACCTAAC)",
+ "SI_TN_E3 (CATAGTTCGC-TTTCGTAACT)",
+ "SI_TN_F3 (AACCATTAGT-GTATGTCGGG)",
+ "SI_TN_G3 (GTTATCACGA-GCAACACCTC)",
+ "SI_TN_H3 (CAGCTGTTAT-AGTACGTGAG)",
+ "SI_TN_A4 (GAACAACCTT-GAACTGGTAC)",
+ "SI_TN_B4 (ACGTTTGATT-TACTGAGAGA)",
+ "SI_TN_C4 (ATGCAAGATC-ACGCCTCTGA)",
+ "SI_TN_D4 (TTACAATCGT-CAGATTGTAC)",
+ "SI_TN_E4 (TCACGTTGGG-CTTTGCTCCA)",
+ "SI_TN_F4 (TTGCGGGACT-TGAGGATCGC)",
+ "SI_TN_G4 (CAGGCGAATA-CCCTTTACCG)",
+ "SI_TN_H4 (CTACGACTGA-CATCGCCCTC)",
+ "SI_TN_A5 (TCTCGAATGT-ACGATCGCGA)",
+ "SI_TN_B5 (TGGGTGCACA-CATGCATCAT)",
+ "SI_TN_C5 (TCAAAGGGTT-GATTACTGAG)",
+ "SI_TN_D5 (CTGCCTGGGT-GACCAATAGC)",
+ "SI_TN_E5 (TAATCTTCGG-AGCCATCAAT)",
+ "SI_TN_F5 (GAGCGAAAGC-TCCTTACCAA)",
+ "SI_TN_G5 (TAAGTAGAAG-CGCGTTTCCT)",
+ "SI_TN_H5 (GCTGGGATGC-GACTAACTGG)",
+ "SI_TN_A6 (TTTGCTGGGT-CAAATTCCGG)",
+ "SI_TN_B6 (ACGAGCGGAA-AACAACTAAG)",
+ "SI_TN_C6 (TCAAGTAAAG-CAGTGCTGTT)",
+ "SI_TN_D6 (GAAAGCGCGC-GAAACATATC)",
+ "SI_TN_E6 (CCATCACCAC-GTGATCCCAA)",
+ "SI_TN_F6 (AGCATCACAT-TCGTAGAGGA)",
+ "SI_TN_G6 (CGAGGAGCAT-CAGTAAGTCT)",
+ "SI_TN_H6 (CCGTCGCTGA-AGGTGTTAGT)",
+ "SI_TN_A7 (TCCGAATAAA-ATGCTACCGC)",
+ "SI_TN_B7 (CCATTGTAAG-TACGAATTGA)",
+ "SI_TN_C7 (GCTGCTCCCA-CTACAGGGTC)",
+ "SI_TN_D7 (TGTTCGCGAA-TCGTGAAATA)",
+ "SI_TN_E7 (CCATTAGGCG-GCATTTCATC)",
+ "SI_TN_F7 (ACCATTGCAC-ACAGTTAAGC)",
+ "SI_TN_G7 (ATCACCGTTT-TGTCGAGGAG)",
+ "SI_TN_H7 (AAGTTAGTAC-ACGCGGAATA)",
+ "SI_TN_A8 (GTTTGAAAGT-GTACGCCATG)",
+ "SI_TN_B8 (TAGTAGTTTG-TCATCGGGCG)",
+ "SI_TN_C8 (ATCTGTAGTT-AGGCCCAATG)",
+ "SI_TN_D8 (GCGTAACGAT-ATTCGTTCAA)",
+ "SI_TN_E8 (CTGGTGATAA-AGGGACCTGG)",
+ "SI_TN_F8 (GTTCTGGAAC-CGGGTACTGG)",
+ "SI_TN_G8 (GACCGCCTTT-TTTAACTCGT)",
+ "SI_TN_H8 (GACTCAGGGT-GAGACCCTTC)",
+ "SI_TN_A9 (GCACGTGACA-AGGAAGTCTG)",
+ "SI_TN_B9 (GATGGAAGGT-AAATTGAGCA)",
+ "SI_TN_C9 (ATGGCGCAAA-CGATGCAAGC)",
+ "SI_TN_D9 (TGTCAGTAAG-AGATGACATC)",
+ "SI_TN_E9 (CGTTGGTCCG-GTAGCTGATA)",
+ "SI_TN_F9 (CGGACGACCT-GTTGCGCCTC)",
+ "SI_TN_G9 (TCCCGACCTG-AACCCACCAA)",
+ "SI_TN_H9 (TGCACAAGCG-TGATGTTTGC)",
+ "SI_TN_A10 (GTCGTTGCCT-AGAACTTCTT)",
+ "SI_TN_B10 (ACAGGTTACG-AGATAAACAG)",
+ "SI_TN_C10 (TAATGGGCAA-GCGCATAGGC)",
+ "SI_TN_D10 (CTACGTAGGT-GTCCCACTTA)",
+ "SI_TN_E10 (CTTCCTACTT-TCCTCCTGTA)",
+ "SI_TN_F10 (TGGACCTTTG-TAAATCTCTG)",
+ "SI_TN_G10 (ATTGTACAGT-TTGGTTACGT)",
+ "SI_TN_H10 (TTCGCTTAAC-CCGCTCGTTA)",
+ "SI_TN_A11 (TCGTACGATG-ACCCTCCCAT)",
+ "SI_TN_B11 (AGTAGTTTGG-ATAGCATGCA)",
+ "SI_TN_C11 (TAACTGTAGT-ATGACCGATA)",
+ "SI_TN_D11 (CCTAGGCAAA-CAGAAATATC)",
+ "SI_TN_E11 (AGACGCATCT-TTGTACGTGG)",
+ "SI_TN_F11 (TCATATGTGA-AAAGTGTTCT)",
+ "SI_TN_G11 (GACAATTGGG-ACATTTGGAA)",
+ "SI_TN_H11 (AGACGACCGA-CCACAGAACA)",
+ "SI_TN_A12 (CGCGAGTAGG-CCTGGTGACA)",
+ "SI_TN_B12 (TCGCCATTTG-ACGGGCATGT)",
+ "SI_TN_C12 (AGCCTTCTCT-CTGTCCGCGT)",
+ "SI_TN_D12 (TTAACGGACG-CAAACGTCGC)",
+ "SI_TN_E12 (TCGGGAGCTG-CCAGACTGCA)",
+ "SI_TN_F12 (CGCCCTCATC-GCGTCTACGC)",
+ "SI_TN_G12 (ATTGGCGCAA-TCAATTGCAA)",
+ "SI_TN_H12 (AGTGGAGGGA-TGCATAGTTT)",
+ ],
+ IndexEnum.TEN_X_TT_A: [
+ "SI_TT_A1 (GTAACATGCG-AGTGTTACCT)",
+ "SI_TT_B1 (ACAGTAACTA-ACAGTTCGTT)",
+ "SI_TT_C1 (TGCGCGGTTT-CAAGGATAAA)",
+ "SI_TT_D1 (TGCAATGTTC-GCTTGTCGAA)",
+ "SI_TT_E1 (TTATTCGAGG-CTGTCCTGCT)",
+ "SI_TT_F1 (AAGATTGGAT-AGCGGGATTT)",
+ "SI_TT_G1 (TGTAGTCATT-CTTGATCGTA)",
+ "SI_TT_H1 (ACAATGTGAA-CGTACCGTTA)",
+ "SI_TT_A2 (GTGGATCAAA-GCCAACCCTG)",
+ "SI_TT_B2 (TCTACCATTT-CGGGAGAGTC)",
+ "SI_TT_C2 (CAATCCCGAC-CCGAGTAGTA)",
+ "SI_TT_D2 (TTAATACGCG-CACCTCGGGT)",
+ "SI_TT_E2 (ATGGAGGGAG-ATAACCCATT)",
+ "SI_TT_F2 (AAGGGCCGCA-CTGATTCCTC)",
+ "SI_TT_G2 (CATGTGGGTT-GATTCCTTTA)",
+ "SI_TT_H2 (TAGCATAGTG-CGGCTCTGTC)",
+ "SI_TT_A3 (CACTACGAAA-TTAGACTGAT)",
+ "SI_TT_B3 (CACGGTGAAT-GTTCGTCACA)",
+ "SI_TT_C3 (ATGGCTTGTG-GAATGTTGTG)",
+ "SI_TT_D3 (CCTTCTAGAG-AATACAACGA)",
+ "SI_TT_E3 (ACCAGACAAC-AGGAACTAGG)",
+ "SI_TT_F3 (GAGAGGATAT-TTGAAATGGG)",
+ "SI_TT_G3 (ATGACGTCGC-AGGTCAGGAT)",
+ "SI_TT_H3 (CCCGTTCTCG-GACGGATTGG)",
+ "SI_TT_A4 (CTCTAGCGAG-TATCTTCATC)",
+ "SI_TT_B4 (GTAGACGAAA-CTAGTGTGGT)",
+ "SI_TT_C4 (TTCTCGATGA-TGTCGGGCAC)",
+ "SI_TT_D4 (GCAGTATAGG-TTCCGTGCAC)",
+ "SI_TT_E4 (AACCACGCAT-ATTCAGGTTA)",
+ "SI_TT_F4 (CCCACCACAA-ACCTCCGCTT)",
+ "SI_TT_G4 (GCGCTTATGG-GCCTGGCTAG)",
+ "SI_TT_H4 (AGTTTCCTGG-TGCCACACAG)",
+ "SI_TT_A5 (GTAGCCCTGT-GAGCATCTAT)",
+ "SI_TT_B5 (TCGGCTCTAC-CCGATGGTCT)",
+ "SI_TT_C5 (TCCGTTGGAT-ACGTTCTCGC)",
+ "SI_TT_D5 (TGGTTCGGGT-GTGGCAGGAG)",
+ "SI_TT_E5 (CGCGGTAGGT-CAGGATGTTG)",
+ "SI_TT_F5 (CGGCTGGATG-TGATAAGCAC)",
+ "SI_TT_G5 (ATAGGGCGAG-TGCATCGAGT)",
+ "SI_TT_H5 (AGCAAGAAGC-TTGTGTTTCT)",
+ "SI_TT_A6 (TAACGCGTGA-CCCTAACTTC)",
+ "SI_TT_B6 (AATGCCATGA-TACGTAATGC)",
+ "SI_TT_C6 (ACGACTACCA-ACGACCCTAA)",
+ "SI_TT_D6 (CCCAGCTTCT-GACACCAAAC)",
+ "SI_TT_E6 (TTGAGAGTCA-AACCTGGTAG)",
+ "SI_TT_F6 (TTGCCCGTGC-GCGTGAGATT)",
+ "SI_TT_G6 (GCGGGTAAGT-TAGCACTAAG)",
+ "SI_TT_H6 (CCTATCCTCG-GAATACTAAC)",
+ "SI_TT_A7 (TCCCAAGGGT-TACTACCTTT)",
+ "SI_TT_B7 (GCCTTCGGTA-CCAACGATTT)",
+ "SI_TT_C7 (CGCGCACTTA-CCTGTATTCT)",
+ "SI_TT_D7 (CCTGTCAGGG-AGCCCGTAAC)",
+ "SI_TT_E7 (GTCCTTCGGC-TCATGCACAG)",
+ "SI_TT_F7 (AATGTATCCA-AATGAGCTTA)",
+ "SI_TT_G7 (GTTTCACGAT-TTCGGCCAAA)",
+ "SI_TT_H7 (ACCTCGAGCT-TGTGTTCGAT)",
+ "SI_TT_A8 (CGAAGTATAC-GAACTTGGAG)",
+ "SI_TT_B8 (GCACTGAGAA-TATGCGTGAA)",
+ "SI_TT_C8 (GCTACAAAGC-CACGTGCCCT)",
+ "SI_TT_D8 (CGCTGAAATC-AGGTGTCTGC)",
+ "SI_TT_E8 (GAGCAAGGGC-ATTGACTTGG)",
+ "SI_TT_F8 (CTCCTTTAGA-GACATAGCTC)",
+ "SI_TT_G8 (TAAGCAACTG-CTATACTCAA)",
+ "SI_TT_H8 (ATAAGGATAC-ATAGATAGGG)",
+ "SI_TT_A9 (AAGTGGAGAG-TTCCTGTTAC)",
+ "SI_TT_B9 (TATTGAGGCA-CAGGTAAGTG)",
+ "SI_TT_C9 (TATCAGCCTA-GTTTCGTCCT)",
+ "SI_TT_D9 (TGGTCCCAAG-CCTCTGGCGT)",
+ "SI_TT_E9 (TGTCCCAACG-TCGATGTCCA)",
+ "SI_TT_F9 (GTCCCATCAA-CGAACGTGAC)",
+ "SI_TT_G9 (CCGGAGGAAG-TGCGGATGTT)",
+ "SI_TT_H9 (AGAACTTAGA-CGAGTCCTTT)",
+ "SI_TT_A10 (CGTGACATGC-ATGGTCTAAA)",
+ "SI_TT_B10 (GCCCGATGGA-AATCGTCTAG)",
+ "SI_TT_C10 (AGAATGGTTT-GAGGGTGGGA)",
+ "SI_TT_D10 (ATGCGAATGG-ACAAGTGTCG)",
+ "SI_TT_E10 (CACAATCCCA-ATATCCACAA)",
+ "SI_TT_F10 (CCGGCAACTG-CGGTTTAACA)",
+ "SI_TT_G10 (ACTTTACGTG-TGAACGCCCT)",
+ "SI_TT_H10 (TTATCTAGGG-AAAGGCTCTA)",
+ "SI_TT_A11 (CGGAACCCAA-GATTCGAGGA)",
+ "SI_TT_B11 (TCTTACTTGC-TGACCTCTAG)",
+ "SI_TT_C11 (ATGGGTGAAA-CTTGGGAATT)",
+ "SI_TT_D11 (CGAATATTCG-CTGGAAGCAA)",
+ "SI_TT_E11 (TCCGGGACAA-GTGAATGCCA)",
+ "SI_TT_F11 (TTCACACCTT-TAGTGTACAC)",
+ "SI_TT_G11 (GATAACCTGC-CATTAGAAAC)",
+ "SI_TT_H11 (ACAATCGATC-TGACGGAATG)",
+ "SI_TT_A12 (CACCGCACCA-GACTGTCAAT)",
+ "SI_TT_B12 (CGTCAAGGGC-TAGGTCACTC)",
+ "SI_TT_C12 (TCGTCAAGAT-GCAACTCAGG)",
+ "SI_TT_D12 (GAATTGGTTA-ACTCTAGTAG)",
+ "SI_TT_E12 (CGTCCACCTG-CATTCATGAC)",
+ "SI_TT_F12 (GAGACGCACG-CTATGAACAT)",
+ "SI_TT_G12 (CTTGCATAAA-ATCAGGGCTT)",
+ "SI_TT_H12 (TGATGATTCA-GTAGGAGTCG)",
+ ],
+ IndexEnum.TRUSEQ_DNA_HT: [
+ "A01 - D701-D501 (ATTACTCG-TATAGCCT)",
+ "B01 - D701-D502 (ATTACTCG-ATAGAGGC)",
+ "C01 - D701-D503 (ATTACTCG-CCTATCCT)",
+ "D01 - D701-D504 (ATTACTCG-GGCTCTGA)",
+ "E01 - D701-D505 (ATTACTCG-AGGCGAAG)",
+ "F01 - D701-D506 (ATTACTCG-TAATCTTA)",
+ "G01 - D701-D507 (ATTACTCG-CAGGACGT)",
+ "H01 - D701-D508 (ATTACTCG-GTACTGAC)",
+ "A02 - D702-D501 (TCCGGAGA-TATAGCCT)",
+ "B02 - D702-D502 (TCCGGAGA-ATAGAGGC)",
+ "C02 - D702-D503 (TCCGGAGA-CCTATCCT)",
+ "D02 - D702-D504 (TCCGGAGA-GGCTCTGA)",
+ "E02 - D702-D505 (TCCGGAGA-AGGCGAAG)",
+ "F02 - D702-D506 (TCCGGAGA-TAATCTTA)",
+ "G02 - D702-D507 (TCCGGAGA-CAGGACGT)",
+ "H02 - D702-D508 (TCCGGAGA-GTACTGAC)",
+ "A03 - D703-D501 (CGCTCATT-TATAGCCT)",
+ "B03 - D703-D502 (CGCTCATT-ATAGAGGC)",
+ "C03 - D703-D503 (CGCTCATT-CCTATCCT)",
+ "D03 - D703-D504 (CGCTCATT-GGCTCTGA)",
+ "E03 - D703-D505 (CGCTCATT-AGGCGAAG)",
+ "F03 - D703-D506 (CGCTCATT-TAATCTTA)",
+ "G03 - D703-D507 (CGCTCATT-CAGGACGT)",
+ "H03 - D703-D508 (CGCTCATT-GTACTGAC)",
+ "A04 - D704-D501 (GAGATTCC-TATAGCCT)",
+ "B04 - D704-D502 (GAGATTCC-ATAGAGGC)",
+ "C04 - D704-D503 (GAGATTCC-CCTATCCT)",
+ "D04 - D704-D504 (GAGATTCC-GGCTCTGA)",
+ "E04 - D704-D505 (GAGATTCC-AGGCGAAG)",
+ "F04 - D704-D506 (GAGATTCC-TAATCTTA)",
+ "G04 - D704-D507 (GAGATTCC-CAGGACGT)",
+ "H04 - D704-D508 (GAGATTCC-GTACTGAC)",
+ "A05 - D705-D501 (ATTCAGAA-TATAGCCT)",
+ "B05 - D705-D502 (ATTCAGAA-ATAGAGGC)",
+ "C05 - D705-D503 (ATTCAGAA-CCTATCCT)",
+ "D05 - D705-D504 (ATTCAGAA-GGCTCTGA)",
+ "E05 - D705-D505 (ATTCAGAA-AGGCGAAG)",
+ "F05 - D705-D506 (ATTCAGAA-TAATCTTA)",
+ "G05 - D705-D507 (ATTCAGAA-CAGGACGT)",
+ "H05 - D705-D508 (ATTCAGAA-GTACTGAC)",
+ "A06 - D706-D501 (GAATTCGT-TATAGCCT)",
+ "B06 - D706-D502 (GAATTCGT-ATAGAGGC)",
+ "C06 - D706-D503 (GAATTCGT-CCTATCCT)",
+ "D06 - D706-D504 (GAATTCGT-GGCTCTGA)",
+ "E06 - D706-D505 (GAATTCGT-AGGCGAAG)",
+ "F06 - D706-D506 (GAATTCGT-TAATCTTA)",
+ "G06 - D706-D507 (GAATTCGT-CAGGACGT)",
+ "H06 - D706-D508 (GAATTCGT-GTACTGAC)",
+ "A07 - D707-D501 (CTGAAGCT-TATAGCCT)",
+ "B07 - D707-D502 (CTGAAGCT-ATAGAGGC)",
+ "C07 - D707-D503 (CTGAAGCT-CCTATCCT)",
+ "D07 - D707-D504 (CTGAAGCT-GGCTCTGA)",
+ "E07 - D707-D505 (CTGAAGCT-AGGCGAAG)",
+ "F07 - D707-D506 (CTGAAGCT-TAATCTTA)",
+ "G07 - D707-D507 (CTGAAGCT-CAGGACGT)",
+ "H07 - D707-D508 (CTGAAGCT-GTACTGAC)",
+ "A08 - D708-D501 (TAATGCGC-TATAGCCT)",
+ "B08 - D708-D502 (TAATGCGC-ATAGAGGC)",
+ "C08 - D708-D503 (TAATGCGC-CCTATCCT)",
+ "D08 - D708-D504 (TAATGCGC-GGCTCTGA)",
+ "E08 - D708-D505 (TAATGCGC-AGGCGAAG)",
+ "F08 - D708-D506 (TAATGCGC-TAATCTTA)",
+ "G08 - D708-D507 (TAATGCGC-CAGGACGT)",
+ "H08 - D708-D508 (TAATGCGC-GTACTGAC)",
+ "A09 - D709-D501 (CGGCTATG-TATAGCCT)",
+ "B09 - D709-D502 (CGGCTATG-ATAGAGGC)",
+ "C09 - D709-D503 (CGGCTATG-CCTATCCT)",
+ "D09 - D709-D504 (CGGCTATG-GGCTCTGA)",
+ "E09 - D709-D505 (CGGCTATG-AGGCGAAG)",
+ "F09 - D709-D506 (CGGCTATG-TAATCTTA)",
+ "G09 - D709-D507 (CGGCTATG-CAGGACGT)",
+ "H09 - D709-D508 (CGGCTATG-GTACTGAC)",
+ "A10 - D710-D501 (TCCGCGAA-TATAGCCT)",
+ "B10 - D710-D502 (TCCGCGAA-ATAGAGGC)",
+ "C10 - D710-D503 (TCCGCGAA-CCTATCCT)",
+ "D10 - D710-D504 (TCCGCGAA-GGCTCTGA)",
+ "E10 - D710-D505 (TCCGCGAA-AGGCGAAG)",
+ "F10 - D710-D506 (TCCGCGAA-TAATCTTA)",
+ "G10 - D710-D507 (TCCGCGAA-CAGGACGT)",
+ "H10 - D710-D508 (TCCGCGAA-GTACTGAC)",
+ "A11 - D711-D501 (TCTCGCGC-TATAGCCT)",
+ "B11 - D711-D502 (TCTCGCGC-ATAGAGGC)",
+ "C11 - D711-D503 (TCTCGCGC-CCTATCCT)",
+ "D11 - D711-D504 (TCTCGCGC-GGCTCTGA)",
+ "E11 - D711-D505 (TCTCGCGC-AGGCGAAG)",
+ "F11 - D711-D506 (TCTCGCGC-TAATCTTA)",
+ "G11 - D711-D507 (TCTCGCGC-CAGGACGT)",
+ "H11 - D711-D508 (TCTCGCGC-GTACTGAC)",
+ "A12 - D712-D501 (AGCGATAG-TATAGCCT)",
+ "B12 - D712-D502 (AGCGATAG-ATAGAGGC)",
+ "C12 - D712-D503 (AGCGATAG-CCTATCCT)",
+ "D12 - D712-D504 (AGCGATAG-GGCTCTGA)",
+ "E12 - D712-D505 (AGCGATAG-AGGCGAAG)",
+ "F12 - D712-D506 (AGCGATAG-TAATCTTA)",
+ "G12 - D712-D507 (AGCGATAG-CAGGACGT)",
+ "H12 - D712-D508 (AGCGATAG-GTACTGAC)",
+ ],
+ IndexEnum.TWIST_UDI_A: [
+ "A01 TWIST (TATCTTCAGC- CCAATATTCG)",
+ "B01 TWIST (TGCACGGATA- CGCAGACAAC)",
+ "C01 TWIST (GGTTGATAGA- TCGGAGCAGA)",
+ "D01 TWIST (ACTCCTGCCT- GAGTCCGTAG)",
+ "E01 TWIST (CCGATAGTCG- ATGTTCACGT)",
+ "F01 TWIST (CAAGATCGAA- TTCGATGGTT)",
+ "G01 TWIST (AGGCTCCTTC- TATCCGTGCA)",
+ "H01 TWIST (ATACGGATAG- AAGCGCAGAG)",
+ "A02 TWIST (AATAGCCTCA- CCGACTTAGT)",
+ "B02 TWIST (CTGCAATCGG- TTCTGCATCG)",
+ "C02 TWIST (CCTGAGTTAT- GGAAGTGCCA)",
+ "D02 TWIST (GACGTCCAGA- AGATTCAACC)",
+ "E02 TWIST (GAATAATCGG- TTCAGGAGAT)",
+ "F02 TWIST (CGGAGTGTGT- AAGGCGTCTG)",
+ "G02 TWIST (TTACCGACCG- ACGCTTGACA)",
+ "H02 TWIST (AGTGTTCGCC- CATGAAGTGA)",
+ "A03 TWIST (CTACGTTCTT- TTACGACCTG)",
+ "B03 TWIST (TCGACACGAA- ATGCAAGCCG)",
+ "C03 TWIST (CCGATAACTT- CTCCGTATAC)",
+ "D03 TWIST (TTGGACATCG- GAATCTGGTC)",
+ "E03 TWIST (AACGTTGAGA- CGGTCGGTAA)",
+ "F03 TWIST (GGCCAGTGAA- TCTGCTAATG)",
+ "G03 TWIST (ATGTCTCCGG- CTCTTATTCG)",
+ "H03 TWIST (GAAGGCGTTC- CACCTCTAGC)",
+ "A04 TWIST (TGTTCCTAGA- TTACTTACCG)",
+ "B04 TWIST (CTCTCGAGGT- CTATGCCTTA)",
+ "C04 TWIST (CTGTACGGTA- GGAAGGTACG)",
+ "D04 TWIST (CTTATGGCAA- GAGGAGACGT)",
+ "E04 TWIST (TCCGCATAGC- ACGCAAGGCA)",
+ "F04 TWIST (GCAAGCACCT- TATCCTGACG)",
+ "G04 TWIST (GCCTGTCCTA- GAAGACCGCT)",
+ "H04 TWIST (ACTGTCTATC- CAACGTGGAC)",
+ "A05 TWIST (CGTCCATGTA- TAAGTGCTCG)",
+ "B05 TWIST (CTAACTGCAA- CACATCGTAG)",
+ "C05 TWIST (TGCTTGTGGT- ACTACCGAGG)",
+ "D05 TWIST (TGTAAGCACA- GATGTGTTCT)",
+ "E05 TWIST (CTCGTTGCGT- AAGTGTCGTA)",
+ "F05 TWIST (GCTAGAGGTG- GGAGAACCAC)",
+ "G05 TWIST (AAGCGGAGAA- TGTACGAACT)",
+ "H05 TWIST (AATGACGCTG- GGATGAGTGC)",
+ "A06 TWIST (TTGGTACGCG- TAGTAGGACA)",
+ "B06 TWIST (TGAAGGTGAA- ACGCCTCGTT)",
+ "C06 TWIST (GTAGTGGCTT- CACCGCTGTT)",
+ "D06 TWIST (CGTAACAGAA- TCTATAGCGG)",
+ "E06 TWIST (AAGGCCATAA- CCGATGGACA)",
+ "F06 TWIST (TTCATAGACC- TTCAACATGC)",
+ "G06 TWIST (CCAACTCCGA- GGAGTAACGC)",
+ "H06 TWIST (CACGAGTATG- AGCCTTAGCG)",
+ "A07 TWIST (CCGCTACCAA- TTACCTCAGT)",
+ "B07 TWIST (CTGAACCTCC- CAGGCATTGT)",
+ "C07 TWIST (GGCCTTGTTA- GTGTTCCACG)",
+ "D07 TWIST (TTAACGCAGA- TTGATCCGCC)",
+ "E07 TWIST (AGGTAGTGCG- GGAGGCTGAT)",
+ "F07 TWIST (CGTGTAACTT- AACGTGACAA)",
+ "G07 TWIST (ACTTGTGACG- CACAAGCTCC)",
+ "H07 TWIST (CCATGCGTTG- CCGTGTTGTC)",
+ "A08 TWIST (CCTTGTAGCG- TTGAGCCAGC)",
+ "B08 TWIST (ACATACGTGA- GCGTTACAGA)",
+ "C08 TWIST (CTTGATATCC- TCCAGACATT)",
+ "D08 TWIST (CAGCCGATGT- TCGAACTCTT)",
+ "E08 TWIST (TCATGCGCTA- ACCTTCTCGG)",
+ "F08 TWIST (ACTCCGTCCA- AGACGCCAAC)",
+ "G08 TWIST (GACAGCCTTG- CAACCGTAAT)",
+ "H08 TWIST (CGGTTATCTG- TTATGCGTTG)",
+ "A09 TWIST (TACTCCACGG- CTATGAGAAC)",
+ "B09 TWIST (ACTTCCGGCA- AAGTTACACG)",
+ "C09 TWIST (GTGAAGCTGC- GCAATGTGAG)",
+ "D09 TWIST (TTGCTCTTCT- CGAAGTCGCA)",
+ "E09 TWIST (AACGCACGTA- CCTGATTCAA)",
+ "F09 TWIST (TTACTGCAGG- TAGAACGTGC)",
+ "G09 TWIST (CCAGTTGAGG- TTCGCAAGGT)",
+ "H09 TWIST (TGTGCGTTAA- TTAATGCCGA)",
+ "A10 TWIST (ACTAGTGCTT- AGAACAGAGT)",
+ "B10 TWIST (CGTGGAACAC- CCATCTGTTC)",
+ "C10 TWIST (ATGGAAGTGG- TTCGTAGGTG)",
+ "D10 TWIST (TGAGATCACA- GCACGGTACA)",
+ "E10 TWIST (GTCCTTGGTG- TGTCAAGAGG)",
+ "F10 TWIST (GAGCGTGGAA- TCTAAGGTAC)",
+ "G10 TWIST (CACACGCTGT- GAACGGAGAC)",
+ "H10 TWIST (TGGTTGTACA- CGCTACCATC)",
+ "A11 TWIST (ATCACTCACA- TTACGGTAAC)",
+ "B11 TWIST (CGGAGGTAGA- TTCAGATGGA)",
+ "C11 TWIST (GAGTTGACAA- TAGCATCTGT)",
+ "D11 TWIST (GCCGAACTTG- GGACGAGATC)",
+ "E11 TWIST (AGGCCTCACA- AGGTTCTGTT)",
+ "F11 TWIST (TCTCTGTTAG- CATACTCGTG)",
+ "G11 TWIST (TCCGACGATT- CCGGATACCA)",
+ "H11 TWIST (AGGCTATGTT- ATGTCCACCG)",
+ "A12 TWIST (CGTTCTCTTG- CACCAAGTGG)",
+ "B12 TWIST (TTGTCTATGG- TTGAGTACAC)",
+ "C12 TWIST (GATGGATACA- CGGTTCCGTA)",
+ "D12 TWIST (CACTTAGGCG- GGAGGTCCTA)",
+ "E12 TWIST (ACACTGGCTA- CCTGCTTGGA)",
+ "F12 TWIST (ATCGCCACTG- TTCACGTCAG)",
+ "G12 TWIST (CTGACGTGAA- AACATAGCCT)",
+ "H12 TWIST (TCAATCGTCT- TGACATAGTC)",
+ ],
+ IndexEnum.TWIST_UDI_B: [
+ "A01 TWIST (ATCGCCTATA-TTGGCTCATA)",
+ "B01 TWIST (CGGATTCCTG-CAGAATACGG)",
+ "C01 TWIST (TCACACGTGG-TGTATAGGTC)",
+ "D01 TWIST (GCAGCATTCC-GTATACCACA)",
+ "E01 TWIST (CCGTGGTGAA-AACTGGACGG)",
+ "F01 TWIST (CACAGAACGG-TGTGAGTGAT)",
+ "G01 TWIST (ATGGATCGAA-AACTCAGCAA)",
+ "H01 TWIST (GGTCTCACCT-AGACGATTGA)",
+ "A02 TWIST (CAACACCGTA-CGGCTTGTTC)",
+ "B02 TWIST (CGAATATTGG-TTCCGTGCTG)",
+ "C02 TWIST (TAATTCCAGC-CGAATACGAT)",
+ "D02 TWIST (GTCGCGGTTA-ACCTCACCAG)",
+ "E02 TWIST (TTCTGCGTCG-TTCGTACACC)",
+ "F02 TWIST (ACGCATACTT-AAGTACGAGA)",
+ "G02 TWIST (GGCTGCACAA-TCGGACCTCT)",
+ "H02 TWIST (ACCAAGCCAA-CCGCCTTGTA)",
+ "A03 TWIST (CCAATTGTCC-GCGTATGAGC)",
+ "B03 TWIST (CAGACGCCTT-TTGAGCTCTG)",
+ "C03 TWIST (AATTGCCAGA-AACGTACCGT)",
+ "D03 TWIST (TGATACCAGA-GGCCTTCACA)",
+ "E03 TWIST (GAGGTTGTTA-TGTGCACTGG)",
+ "F03 TWIST (AGAGTATCAG-GGATACAGGT)",
+ "G03 TWIST (CTGGCGTATG-CCAATGTTAC)",
+ "H03 TWIST (GGTCATCTCG-GCTATGCGGA)",
+ "A04 TWIST (TGTCGAACAA-CCAGAATCTA)",
+ "B04 TWIST (GTGGCACGAA-CCAATTAGCA)",
+ "C04 TWIST (AAGCCTTAGA-CGTGTTATGA)",
+ "D04 TWIST (CGCTAAGGCT-TGTGCCGGTT)",
+ "E04 TWIST (AATCACGACC-CACCAGAAGT)",
+ "F04 TWIST (GTAGCTGTCG-TCTGCGTTAA)",
+ "G04 TWIST (CACGTAAGGT-AGCTTAGAGG)",
+ "H04 TWIST (TCACTTCATG-TTGCGACCAC)",
+ "A05 TWIST (GTTGGCGTCT-CGAAGTCTAG)",
+ "B05 TWIST (CACACGCCAA-GCTGAAGATA)",
+ "C05 TWIST (ACACTGTGAA-TCTGTTAGAC)",
+ "D05 TWIST (CGATTGTTCT-TGTACAACCA)",
+ "E05 TWIST (TCGGCTACTG-CTATTGTGTG)",
+ "F05 TWIST (TTGTAAGAGG-GAAGCAGCTG)",
+ "G05 TWIST (CGAGTCCGTT-CCGCAGTAGT)",
+ "H05 TWIST (GTGTACTCAA-AAGGTTGCTT)",
+ "A06 TWIST (GCGTGACGTT-CTCTCTTCTA)",
+ "B06 TWIST (AGGCGTCTGA-GGATCTTGTG)",
+ "C06 TWIST (ACTTACGAGG-AGCGATTAAC)",
+ "D06 TWIST (CAGGTCGTAA-GAAGGCATAA)",
+ "E06 TWIST (TACGCTAGTT-AGCAGACTAA)",
+ "F06 TWIST (TCTGTCGTGC-AAGCACTAGT)",
+ "G06 TWIST (GATCTTGGCA-TTAGACAGCG)",
+ "H06 TWIST (TGGAGAGCCA-TTAGGCACAA)",
+ "A07 TWIST (ACCAATCTCG-TTCCGGCACT)",
+ "B07 TWIST (GTCGTGACAC-TTGTATGGCT)",
+ "C07 TWIST (TCTCTAGTCG-TGGATCGATT)",
+ "D07 TWIST (ATTACGGTTG-CGGAATCACC)",
+ "E07 TWIST (CGGTAAGTAA-GAGCTATCTA)",
+ "F07 TWIST (TAACGTCCGG-ACCTCGAGAG)",
+ "G07 TWIST (GAACACAGTT-CCGAATTCAC)",
+ "H07 TWIST (AGGTCCTATA-AACGTCACGC)",
+ "A08 TWIST (TTGACCTAGC-TTGGTGTTCC)",
+ "B08 TWIST (GCTTCAATCA-CCAGGTGGAA)",
+ "C08 TWIST (TGCGTGCGAA-TCATACCGAT)",
+ "D08 TWIST (AATGGTACCT-CGACGGTTGT)",
+ "E08 TWIST (TGTATCGCGA-CACTCACACG)",
+ "F08 TWIST (GTAACATTGG-TTGGCCACGA)",
+ "G08 TWIST (CAACAATTCG-AATCGGTCGC)",
+ "H08 TWIST (GCGTGTCATG-AGAACAATCG)",
+ "A09 TWIST (TAGATCCGAA-CTATCGAAGT)",
+ "B09 TWIST (TCTTAACTGG-TCGGCCTGAA)",
+ "C09 TWIST (GTCACATCCG-TCACTGTTCT)",
+ "D09 TWIST (TGAAGCATCT-GGTATCTAAC)",
+ "E09 TWIST (CGGACTACTT-CGTATTAAGG)",
+ "F09 TWIST (AACGGAGTCC-TAGGAGTGTC)",
+ "G09 TWIST (AGGTGTGACC-CTCCGAACTC)",
+ "H09 TWIST (CCAGAGTTCC-ATGTCTCTCG)",
+ "A10 TWIST (CCAGTGATTG-AGGTGCACTT)",
+ "B10 TWIST (GACTGACATA-TTGGCCGCAT)",
+ "C10 TWIST (GCGATCCTTG-GGTGTCTGAG)",
+ "D10 TWIST (TGTTCCACTT-CCGTGCCATT)",
+ "E10 TWIST (ATCCAATAGG-AAGATGACGA)",
+ "F10 TWIST (AGACCGTTAA-TGTATTGCCA)",
+ "G10 TWIST (ACTATTGACC-AACCATCGGC)",
+ "H10 TWIST (GCCTAATTCC-CGTGCAACCT)",
+ "A11 TWIST (GTAGGTACAA-TTCTTGAGTG)",
+ "B11 TWIST (TGCGACTTCG-TCTGCAACAA)",
+ "C11 TWIST (TTGTCACGTT-CCGCTACACA)",
+ "D11 TWIST (CAACGACTGA-CTCTGTCAGG)",
+ "E11 TWIST (GATTCGGCTA-TTAACGGTCT)",
+ "F11 TWIST (TGGTGGCTAG-CGATGACCTT)",
+ "G11 TWIST (AGGCCAGGAT-AGGCAGGAGT)",
+ "H11 TWIST (AACGCCTGTG-AACGGACTCG)",
+ "A12 TWIST (CGTGTGAGTG-TTGGTTCGGC)",
+ "B12 TWIST (CGTATGTGAA-CGCACTACCT)",
+ "C12 TWIST (TACGTCACAA-CCATACCACG)",
+ "D12 TWIST (GGAAGATCCG-GAATTCGGTA)",
+ "E12 TWIST (CATGTCAGCT-AGTCCTCCAC)",
+ "F12 TWIST (ACAGCGTCAC-TAGTCATTCG)",
+ "G12 TWIST (TGTTACAAGG-TTGAGGTCGC)",
+ "H12 TWIST (CTTATAGAGG-CAACGTTATG)",
+ ],
+ IndexEnum.TWIST_UDI_C: [
+ "A01 TWIST (TCGAAGTCAA-CGGAGGAATG)",
+ "B01 TWIST (CGAGGCCTAT-GAGTCAGCCA)",
+ "C01 TWIST (TCCTGAAGTG-GGAATTAGGC)",
+ "D01 TWIST (AATCCTTACC-TTCGCCACAC)",
+ "E01 TWIST (TTGTTGCAGA-CCTCGCTTAC)",
+ "F01 TWIST (AATCTAGGCC-ACAGCGTGTG)",
+ "G01 TWIST (GGCTCTACTG-TTCCGCTTCT)",
+ "H01 TWIST (GTCCACGTTG-CAGCGTCATT)",
+ "A02 TWIST (CTCCGCAGTT-CCGTAGAACA)",
+ "B02 TWIST (AGAACAGTGA-CGGTTATCGT)",
+ "C02 TWIST (GCTCTTATTG-TCTGGTATCA)",
+ "D02 TWIST (TGTAGACGAA-AAGTATGCGT)",
+ "E02 TWIST (CTTGTCGTCG-TTCCTTCGAG)",
+ "F02 TWIST (TCGTCTTACA-GCTATGGATA)",
+ "G02 TWIST (GAGAGGAGGA-AGGTACCATT)",
+ "H02 TWIST (GTTAGATACC-TTACGGAGTC)",
+ "A03 TWIST (GGCTTAAGAA-TGAGGACTTA)",
+ "B03 TWIST (TCTGGTACAA-TTGAGTTGCC)",
+ "C03 TWIST (GTGAATTCGG-AGCTTCGCGA)",
+ "D03 TWIST (GAATGGAGAA-CATACGCCAG)",
+ "E03 TWIST (AGTCAATTGG-CAAGACCAGC)",
+ "F03 TWIST (CGCATCACCT-GATAGACAGT)",
+ "G03 TWIST (TATTGACACC-CGCTCGTGAA)",
+ "H03 TWIST (AGACTGTCGG-TCTCTAACAG)",
+ "A04 TWIST (ATCTGGACTC-ACCTAGGAGG)",
+ "B04 TWIST (GAGAATAAGG-TCTGTACCTT)",
+ "C04 TWIST (TGTTGTCGCC-CTCAGGCCAT)",
+ "D04 TWIST (CTGCGGTGTT-TTGTGCAGCC)",
+ "E04 TWIST (GATAACTCCG-TAGCCGAATC)",
+ "F04 TWIST (ATCCTTGTAC-AAGCCTGTTA)",
+ "G04 TWIST (TACGCGTATA-TGTACAGTAG)",
+ "H04 TWIST (CCACCAATTG-CGATTCTGCC)",
+ "A05 TWIST (TGTGAAGGCC-TTGCTAAGGA)",
+ "B05 TWIST (CCTTGACTGC-ACTCCTTGGC)",
+ "C05 TWIST (AATGCGTCGG-GAAGGCGAAC)",
+ "D05 TWIST (AAGACTACAC-CAATACCTTG)",
+ "E05 TWIST (GTCAGTGCAG-CGACGACAAG)",
+ "F05 TWIST (CTCACCAGAA-GAACCTGACC)",
+ "G05 TWIST (TCTCGTACTT-TTGCCTCGCA)",
+ "H05 TWIST (TCAGATTAGG-TTCGTGTCGA)",
+ "A06 TWIST (CACTCAAGAA-TGGATGGCAA)",
+ "B06 TWIST (AGAGCCATTC-TTCACCAGCT)",
+ "C06 TWIST (CACGATTCCG-CCTGAGTAGC)",
+ "D06 TWIST (TTGGAGCCTG-AGGTGTCCGT)",
+ "E06 TWIST (TTACGACTTG-GTCTGGTTGC)",
+ "F06 TWIST (TTAAGGTCGG-CTCTTAGATG)",
+ "G06 TWIST (GGTTCTGTCA-TATCACCTGC)",
+ "H06 TWIST (GATACGCACC-CAGAGGCAAG)",
+ "A07 TWIST (TCGCGAAGCT-CCGGTCAACA)",
+ "B07 TWIST (GTTAAGACGG-TCACGAGGTG)",
+ "C07 TWIST (CCGGTCATAC-CCATAGACAA)",
+ "D07 TWIST (GTCAGCTTAA-GAGCTTGGAC)",
+ "E07 TWIST (ACCGCGGATA-TACGGTGTTG)",
+ "F07 TWIST (GTTGCATCAA-TTCAACTCGA)",
+ "G07 TWIST (TGTGCACCAA-AAGGCAGGTA)",
+ "H07 TWIST (ATCTGTGGTC-CGGCCAATTC)",
+ "A08 TWIST (CACAAGATCC-CAACCGGACA)",
+ "B08 TWIST (CTGCTAGCTG-AACTTGGCCG)",
+ "C08 TWIST (ACCGGTCGAA-TGGAACATAG)",
+ "D08 TWIST (GCACGTTCTA-TTCGGATCTA)",
+ "E08 TWIST (AAGGAAGGAA-CGGAATCGTG)",
+ "F08 TWIST (AGAGAGATAG-TCTAATCGGT)",
+ "G08 TWIST (GGTTCCTATT-GCTGGAATTA)",
+ "H08 TWIST (TTCACGAGCG-CGCTTCTCAC)",
+ "A09 TWIST (GGCACAACCT-TAGACTCCTG)",
+ "B09 TWIST (TGACTCAGAA-CCGTTGATTG)",
+ "C09 TWIST (CGATCTCAGG-CGAACCTCCA)",
+ "D09 TWIST (CCTGCTGGAA-TTGGAAGTTG)",
+ "E09 TWIST (GAGCTGTATA-CCAGGAGTAC)",
+ "F09 TWIST (AACCTGACGG-AGGTTCGTCG)",
+ "G09 TWIST (AAGCTCGTGG-GACCTGAAGA)",
+ "H09 TWIST (GTCCAAGCTC-TTAACGCACA)",
+ "A10 TWIST (CTAGACTTCG-TCGGAGTTGG)",
+ "B10 TWIST (TCCAAGGTAA-CGATGACTCC)",
+ "C10 TWIST (CTTGGTAGCA-TATAGGTTGG)",
+ "D10 TWIST (AACGAGGCGT-GACAAGTGTT)",
+ "E10 TWIST (CAGAAGATGG-TTCTCCGGAA)",
+ "F10 TWIST (TGATACATCC-ACACACTCCG)",
+ "G10 TWIST (GCGCGTAGTT-CTGGTCACTA)",
+ "H10 TWIST (GTTGTCTGCG-TTCGTGCCAC)",
+ "A11 TWIST (CTTAGCGCTG-AGATCATGGA)",
+ "B11 TWIST (ATCAGCCTCC-GAGTATGTAC)",
+ "C11 TWIST (TGCAGTGCTC-TAGAACACCT)",
+ "D11 TWIST (GAGCTCAGAC-CCAGTTAAGA)",
+ "E11 TWIST (ACCTGGACAA-CGCTTATCTG)",
+ "F11 TWIST (CAACTTCCAA-GAGCTCTTAC)",
+ "G11 TWIST (CCATCCTGTG-TCTCAAGGCG)",
+ "H11 TWIST (GGCAGTTAGA-CTAAGTACCA)",
+ "A12 TWIST (TCACATGAGA-TCGACAAGCC)",
+ "B12 TWIST (TATTCGTTGG-TTCGACATCA)",
+ "C12 TWIST (AGCGGTCTTC-AGTGGTACTT)",
+ "D12 TWIST (GCGACCGATT-TTGCACTTGT)",
+ "E12 TWIST (GATCTCGTCC-GTCTTCGCAG)",
+ "F12 TWIST (CCATTATAGG-CAGGCTCCAA)",
+ "G12 TWIST (ACAGACCACG-CCAGGTTACG)",
+ "H12 TWIST (ATTCCACACA-CAATCGCCTA)",
+ ],
+}
diff --git a/cg/services/orders/validation/model_validator/model_validator.py b/cg/services/orders/validation/model_validator/model_validator.py
new file mode 100644
index 0000000000..5ff97d792f
--- /dev/null
+++ b/cg/services/orders/validation/model_validator/model_validator.py
@@ -0,0 +1,20 @@
+from typing import TypeVar
+
+from pydantic_core import ValidationError
+
+from cg.services.orders.validation.errors.validation_errors import ValidationErrors
+from cg.services.orders.validation.model_validator.utils import convert_errors
+from cg.services.orders.validation.models.order import Order
+
+ParsedOrder = TypeVar("ParsedOrder", bound=Order)
+
+
+class ModelValidator:
+
+ @staticmethod
+ def validate(order: dict, model: type[Order]) -> tuple[ParsedOrder | None, ValidationErrors]:
+ try:
+            parsed_order: Order = model.model_validate(order)
+            return parsed_order, ValidationErrors()
+ except ValidationError as error:
+ return None, convert_errors(pydantic_errors=error)
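
A quick usage sketch of the two-outcome contract above (hedged: the raw dict is incomplete and only illustrative; FastqOrder is one of the order models added later in this diff):

    from cg.services.orders.validation.model_validator.model_validator import ModelValidator
    from cg.services.orders.validation.workflows.fastq.models.order import FastqOrder

    raw_order = {"customer": "cust000", "name": "order1"}  # plus the remaining required fields
    order, errors = ModelValidator.validate(order=raw_order, model=FastqOrder)
    if order is None:
        print(errors.order_errors)  # e.g. the missing delivery/project type fields
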
diff --git a/cg/services/orders/validation/model_validator/utils.py b/cg/services/orders/validation/model_validator/utils.py
new file mode 100644
index 0000000000..a4e736e955
--- /dev/null
+++ b/cg/services/orders/validation/model_validator/utils.py
@@ -0,0 +1,169 @@
+from pydantic_core import ErrorDetails, ValidationError
+
+from cg.services.orders.validation.errors.case_errors import CaseError
+from cg.services.orders.validation.errors.case_sample_errors import CaseSampleError
+from cg.services.orders.validation.errors.order_errors import OrderError
+from cg.services.orders.validation.errors.sample_errors import SampleError
+from cg.services.orders.validation.errors.validation_errors import ValidationErrors
+
+
+def convert_errors(pydantic_errors: ValidationError) -> ValidationErrors:
+ error_details: list[ErrorDetails] = pydantic_errors.errors()
+ order_errors: list[OrderError] = convert_order_errors(error_details)
+ case_errors: list[CaseError] = convert_case_errors(error_details=error_details)
+ case_sample_errors: list[CaseSampleError] = convert_case_sample_errors(
+ error_details=error_details
+ )
+ sample_errors: list[SampleError] = convert_sample_errors(error_details=error_details)
+ return ValidationErrors(
+ order_errors=order_errors,
+ case_errors=case_errors,
+ case_sample_errors=case_sample_errors,
+ sample_errors=sample_errors,
+ )
+
+
+def convert_order_errors(error_details: list[ErrorDetails]) -> list[OrderError]:
+ errors: list[OrderError] = []
+ order_details: list[ErrorDetails] = get_order_error_details(error_details)
+ for error_detail in order_details:
+ error: OrderError = create_order_error(error_detail)
+ errors.append(error)
+ return errors
+
+
+def convert_case_errors(error_details: list[ErrorDetails]) -> list[CaseError]:
+ errors: list[CaseError] = []
+ case_details: list[ErrorDetails] = get_case_error_details(error_details)
+ for error_detail in case_details:
+ error: CaseError = create_case_error(error_detail)
+ errors.append(error)
+ return errors
+
+
+def convert_sample_errors(error_details: list[ErrorDetails]) -> list[SampleError]:
+ errors: list[SampleError] = []
+ sample_details: list[ErrorDetails] = get_sample_error_details(error_details)
+ for error_detail in sample_details:
+ error: SampleError = create_sample_error(error_detail)
+ errors.append(error)
+ return errors
+
+
+def create_order_error(error: ErrorDetails) -> OrderError:
+ field_name: str = get_order_field_name(error)
+ message: str = get_error_message(error)
+ error = OrderError(field=field_name, message=message)
+ return error
+
+
+def create_sample_error(error: ErrorDetails) -> SampleError:
+ sample_index: int = get_sample_index(error)
+ field_name: str = get_sample_field_name(error)
+ message: str = get_error_message(error)
+ error = SampleError(sample_index=sample_index, field=field_name, message=message)
+ return error
+
+
+def create_case_error(error: ErrorDetails) -> CaseError:
+ case_index: int = get_case_index(error=error)
+ field_name: str = get_case_field_name(error)
+ message: str = get_error_message(error)
+ error = CaseError(case_index=case_index, field=field_name, message=message)
+ return error
+
+
+def convert_case_sample_errors(error_details: list[ErrorDetails]) -> list[CaseSampleError]:
+ errors: list[CaseSampleError] = []
+ case_sample_details: list[ErrorDetails] = get_case_sample_error_details(error_details)
+ for error_detail in case_sample_details:
+ error = create_case_sample_error(error_detail)
+ errors.append(error)
+ return errors
+
+
+def create_case_sample_error(error: ErrorDetails) -> CaseSampleError:
+ case_index: int = get_case_index(error=error)
+ sample_index: int = get_case_sample_index(error=error)
+ field_name: str = get_case_sample_field_name(error)
+ message: str = get_error_message(error)
+ error = CaseSampleError(
+ case_index=case_index,
+ sample_index=sample_index,
+ field=field_name,
+ message=message,
+ )
+ return error
+
+
+"""
+What follows below are ways of extracting data from a Pydantic ErrorDetails object. The aim is to find out
+where the error occurred, for which the 'loc' value (which is a tuple) can be used. It is generally structured in
+alternating strings and ints, specifying field names and list indices. An example:
+if loc = ('cases', 3, 'samples', 2, 'well_position'), that means that the error stems from the well_position of the
+third sample in the fourth case.
+"""
+
+
+def get_sample_error_details(error_details: list[ErrorDetails]) -> list[ErrorDetails]:
+ return [error for error in error_details if is_sample_error(error)]
+
+
+def get_case_error_details(error_details: list[ErrorDetails]) -> list[ErrorDetails]:
+ return [error for error in error_details if is_case_error(error)]
+
+
+def get_case_sample_error_details(error_details: list[ErrorDetails]) -> list[ErrorDetails]:
+ return [error for error in error_details if is_case_sample_error(error)]
+
+
+def get_order_error_details(error_details: list[ErrorDetails]) -> list[ErrorDetails]:
+ return [error for error in error_details if is_order_error(error)]
+
+
+def is_sample_error(error: ErrorDetails) -> bool:
+ return len(error["loc"]) == 3 and error["loc"][0] == "samples"
+
+
+def is_case_error(error: ErrorDetails) -> bool:
+ return len(error["loc"]) == 4 and error["loc"][0] == "cases"
+
+
+def is_case_sample_error(error: ErrorDetails) -> bool:
+ return len(error["loc"]) == 7
+
+
+def is_order_error(error: ErrorDetails) -> bool:
+ return len(error["loc"]) == 1
+
+
+def get_error_message(error: ErrorDetails) -> str:
+ return error["msg"]
+
+
+def get_sample_field_name(error: ErrorDetails) -> str:
+ return error["loc"][2]
+
+
+def get_case_field_name(error: ErrorDetails) -> str:
+ return error["loc"][3]
+
+
+def get_case_sample_field_name(error: ErrorDetails) -> str:
+ return error["loc"][6]
+
+
+def get_order_field_name(error: ErrorDetails) -> str:
+ return error["loc"][0]
+
+
+def get_sample_index(error: ErrorDetails) -> int:
+ return error["loc"][1]
+
+
+def get_case_index(error: ErrorDetails) -> int:
+ return error["loc"][1]
+
+
+def get_case_sample_index(error: ErrorDetails) -> int:
+ return error["loc"][4]
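
A self-contained sketch (plain Pydantic, not the cg models) of the 'loc' shapes the predicates above key off; _Order and _Sample are throwaway stand-ins:

    from pydantic import BaseModel, ValidationError

    class _Sample(BaseModel):
        volume: int

    class _Order(BaseModel):
        name: str
        samples: list[_Sample]

    try:
        _Order.model_validate({"samples": [{"volume": "full"}]})
    except ValidationError as exc:
        for detail in exc.errors():
            print(detail["loc"])  # ('name',) -> order error; ('samples', 0, 'volume') -> sample error
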
diff --git a/cg/services/orders/validation/models/__init__.py b/cg/services/orders/validation/models/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/validation/models/case.py b/cg/services/orders/validation/models/case.py
new file mode 100644
index 0000000000..6c7207b547
--- /dev/null
+++ b/cg/services/orders/validation/models/case.py
@@ -0,0 +1,66 @@
+from pydantic import BaseModel, Discriminator, Field, Tag, model_validator
+from typing_extensions import Annotated
+
+from cg.constants.priority import PriorityTerms
+from cg.models.orders.sample_base import NAME_PATTERN
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.models.sample_aliases import SampleInCase
+
+NewSample = Annotated[SampleInCase, Tag("new")]
+ExistingSampleType = Annotated[ExistingSample, Tag("existing")]
+
+
+class Case(BaseModel):
+ name: str = Field(pattern=NAME_PATTERN, min_length=2, max_length=128)
+ priority: PriorityTerms = PriorityTerms.STANDARD
+ samples: list[
+ Annotated[
+ NewSample | ExistingSampleType,
+ Discriminator(has_internal_id),
+ ]
+ ]
+
+ @property
+ def is_new(self) -> bool:
+ return True
+
+ @property
+ def enumerated_samples(self) -> enumerate[NewSample | ExistingSampleType]:
+ return enumerate(self.samples)
+
+ @property
+ def enumerated_new_samples(self) -> list[tuple[int, SampleInCase]]:
+ samples: list[tuple[int, SampleInCase]] = []
+ for sample_index, sample in self.enumerated_samples:
+ if sample.is_new:
+ samples.append((sample_index, sample))
+ return samples
+
+ @property
+ def enumerated_existing_samples(self) -> list[tuple[int, ExistingSample]]:
+ samples: list[tuple[int, ExistingSample]] = []
+ for sample_index, sample in self.enumerated_samples:
+ if not sample.is_new:
+ samples.append((sample_index, sample))
+ return samples
+
+ def get_sample(self, sample_name: str) -> SampleInCase | None:
+ for _, sample in self.enumerated_new_samples:
+ if sample.name == sample_name:
+ return sample
+
+ @model_validator(mode="before")
+ def convert_empty_strings_to_none(cls, data):
+ if isinstance(data, dict):
+ for key, value in data.items():
+ if value == "":
+ data[key] = None
+ return data
+
+ @model_validator(mode="after")
+ def set_case_name_on_new_samples(self):
+ """Sets the case name on new samples, so it can be easily fetched when stored in LIMS."""
+ for _, sample in self.enumerated_new_samples:
+ sample._case_name = self.name
+ return self
diff --git a/cg/services/orders/validation/models/case_aliases.py b/cg/services/orders/validation/models/case_aliases.py
new file mode 100644
index 0000000000..6ea53d6895
--- /dev/null
+++ b/cg/services/orders/validation/models/case_aliases.py
@@ -0,0 +1,6 @@
+from cg.services.orders.validation.workflows.mip_dna.models.case import MipDnaCase
+from cg.services.orders.validation.workflows.tomte.models.case import TomteCase
+
+CaseContainingRelatives = TomteCase | MipDnaCase
+
+CaseWithSkipRC = TomteCase | MipDnaCase
diff --git a/cg/services/orders/validation/models/discriminators.py b/cg/services/orders/validation/models/discriminators.py
new file mode 100644
index 0000000000..272126cd74
--- /dev/null
+++ b/cg/services/orders/validation/models/discriminators.py
@@ -0,0 +1,7 @@
+from typing import Any
+
+
+def has_internal_id(v: Any) -> str:
+ if isinstance(v, dict):
+ return "existing" if v.get("internal_id") else "new"
+ return "existing" if getattr(v, "internal_id", None) else "new"
diff --git a/cg/services/orders/validation/models/existing_case.py b/cg/services/orders/validation/models/existing_case.py
new file mode 100644
index 0000000000..3bb7a508de
--- /dev/null
+++ b/cg/services/orders/validation/models/existing_case.py
@@ -0,0 +1,10 @@
+from pydantic import BaseModel
+
+
+class ExistingCase(BaseModel):
+ internal_id: str
+ panels: list[str] | None = None
+
+ @property
+ def is_new(self) -> bool:
+ return False
diff --git a/cg/services/orders/validation/models/existing_sample.py b/cg/services/orders/validation/models/existing_sample.py
new file mode 100644
index 0000000000..87e8febd28
--- /dev/null
+++ b/cg/services/orders/validation/models/existing_sample.py
@@ -0,0 +1,14 @@
+from pydantic import BaseModel, Field
+
+from cg.models.orders.sample_base import NAME_PATTERN, StatusEnum
+
+
+class ExistingSample(BaseModel):
+ father: str | None = Field(None, pattern=NAME_PATTERN)
+ internal_id: str
+ mother: str | None = Field(None, pattern=NAME_PATTERN)
+ status: StatusEnum | None = None
+
+ @property
+ def is_new(self) -> bool:
+ return False
diff --git a/cg/services/orders/validation/models/order.py b/cg/services/orders/validation/models/order.py
new file mode 100644
index 0000000000..ccc3d74341
--- /dev/null
+++ b/cg/services/orders/validation/models/order.py
@@ -0,0 +1,25 @@
+from pydantic import BaseModel, BeforeValidator, Field, PrivateAttr, model_validator
+from typing_extensions import Annotated
+
+from cg.constants import DataDelivery
+from cg.models.orders.constants import OrderType
+from cg.services.orders.validation.models.utils import set_null_to_false
+
+
+class Order(BaseModel):
+ comment: str | None = None
+ customer: str = Field(min_length=1)
+ delivery_type: DataDelivery
+ order_type: OrderType = Field(alias="project_type")
+ name: str = Field(min_length=1)
+ skip_reception_control: Annotated[bool, BeforeValidator(set_null_to_false)] = False
+ _generated_ticket_id: int | None = PrivateAttr(default=None)
+    _user_id: int | None = PrivateAttr(default=None)
+
+ @model_validator(mode="before")
+ def convert_empty_strings_to_none(cls, data):
+ if isinstance(data, dict):
+ for key, value in data.items():
+ if value == "":
+ data[key] = None
+ return data
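
The "before" validator above means orderform fields submitted as empty strings behave like omitted fields. A minimal sketch of the same pattern (MiniOrder is hypothetical):

    from pydantic import BaseModel, model_validator

    class MiniOrder(BaseModel):
        comment: str | None = None

        @model_validator(mode="before")
        @classmethod
        def convert_empty_strings_to_none(cls, data):
            if isinstance(data, dict):
                return {key: (None if value == "" else value) for key, value in data.items()}
            return data

    assert MiniOrder.model_validate({"comment": ""}).comment is None
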
diff --git a/cg/services/orders/validation/models/order_aliases.py b/cg/services/orders/validation/models/order_aliases.py
new file mode 100644
index 0000000000..237d1427a4
--- /dev/null
+++ b/cg/services/orders/validation/models/order_aliases.py
@@ -0,0 +1,12 @@
+from cg.services.orders.validation.workflows.fluffy.models.order import FluffyOrder
+from cg.services.orders.validation.workflows.metagenome.models.sample import MetagenomeSample
+from cg.services.orders.validation.workflows.microbial_fastq.models.order import MicrobialFastqOrder
+from cg.services.orders.validation.workflows.microsalt.models.order import MicrosaltOrder
+from cg.services.orders.validation.workflows.mutant.models.order import MutantOrder
+from cg.services.orders.validation.workflows.rml.models.order import RmlOrder
+from cg.services.orders.validation.workflows.taxprofiler.models.sample import TaxprofilerSample
+
+OrderWithIndexedSamples = FluffyOrder | RmlOrder
+OrderWithControlSamples = (
+ MetagenomeSample | MicrobialFastqOrder | MicrosaltOrder | MutantOrder | TaxprofilerSample
+)
diff --git a/cg/services/orders/validation/models/order_with_cases.py b/cg/services/orders/validation/models/order_with_cases.py
new file mode 100644
index 0000000000..d440a35018
--- /dev/null
+++ b/cg/services/orders/validation/models/order_with_cases.py
@@ -0,0 +1,43 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.case import Case
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_case import ExistingCase
+from cg.services.orders.validation.models.order import Order
+from cg.services.orders.validation.models.sample import Sample
+
+NewCaseType = Annotated[Case, Tag("new")]
+ExistingCaseType = Annotated[ExistingCase, Tag("existing")]
+
+
+class OrderWithCases(Order):
+ cases: list[Annotated[NewCaseType | ExistingCaseType, Discriminator(has_internal_id)]]
+
+ @property
+ def enumerated_cases(self) -> enumerate[Case | ExistingCase]:
+ return enumerate(self.cases)
+
+ @property
+ def enumerated_new_cases(self) -> list[tuple[int, Case]]:
+ cases: list[tuple[int, Case]] = []
+ for case_index, case in self.enumerated_cases:
+ if case.is_new:
+ cases.append((case_index, case))
+ return cases
+
+ @property
+ def enumerated_existing_cases(self) -> list[tuple[int, ExistingCase]]:
+ cases: list[tuple[int, ExistingCase]] = []
+ for case_index, case in self.enumerated_cases:
+ if not case.is_new:
+ cases.append((case_index, case))
+ return cases
+
+ @property
+ def enumerated_new_samples(self) -> list[tuple[int, int, Sample]]:
+ return [
+ (case_index, sample_index, sample)
+ for case_index, case in self.enumerated_new_cases
+ for sample_index, sample in case.enumerated_new_samples
+ ]
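
The (case_index, sample_index, sample) triples produced by enumerated_new_samples are the indices the case-sample error types carry. A plain-Python sketch of the same flattening:

    cases = [{"samples": ["s1", "s2"]}, {"samples": ["s3"]}]
    flat = [
        (case_index, sample_index, sample)
        for case_index, case in enumerate(cases)
        for sample_index, sample in enumerate(case["samples"])
    ]
    assert flat == [(0, 0, "s1"), (0, 1, "s2"), (1, 0, "s3")]
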
diff --git a/cg/services/orders/validation/models/order_with_samples.py b/cg/services/orders/validation/models/order_with_samples.py
new file mode 100644
index 0000000000..91f228d825
--- /dev/null
+++ b/cg/services/orders/validation/models/order_with_samples.py
@@ -0,0 +1,10 @@
+from cg.services.orders.validation.models.order import Order
+from cg.services.orders.validation.models.sample import Sample
+
+
+class OrderWithSamples(Order):
+ samples: list[Sample]
+
+ @property
+ def enumerated_samples(self) -> enumerate[Sample]:
+ return enumerate(self.samples)
diff --git a/cg/services/orders/validation/models/sample.py b/cg/services/orders/validation/models/sample.py
new file mode 100644
index 0000000000..21dcfa453e
--- /dev/null
+++ b/cg/services/orders/validation/models/sample.py
@@ -0,0 +1,38 @@
+from pydantic import BaseModel, Field, PrivateAttr, model_validator
+
+from cg.models.orders.sample_base import NAME_PATTERN, ContainerEnum
+
+
+class Sample(BaseModel):
+ application: str = Field(min_length=1)
+ _case_name: str = PrivateAttr(default="")
+ comment: str | None = None
+ container: ContainerEnum
+ container_name: str | None = None
+ _generated_lims_id: str | None = PrivateAttr(default=None) # Will be populated by LIMS
+ name: str = Field(pattern=NAME_PATTERN, min_length=2, max_length=128)
+ volume: int | None = None
+ well_position: str | None = None
+
+ @property
+ def is_new(self) -> bool:
+ return True
+
+ @property
+ def is_on_plate(self) -> bool:
+ return self.container == ContainerEnum.plate
+
+ @model_validator(mode="before")
+ @classmethod
+ def convert_empty_strings_to_none(cls, data):
+ if isinstance(data, dict):
+ for key, value in data.items():
+ if value == "":
+ data[key] = None
+ return data
+
+ @model_validator(mode="after")
+ def set_tube_name_default(self):
+ if self.container == ContainerEnum.tube and not self.container_name:
+ self.container_name = self.name
+ return self
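
A hedged sketch of the tube-name defaulting above (assumes NAME_PATTERN admits plain alphanumerics; "ADM1" is a made-up application tag):

    from cg.models.orders.sample_base import ContainerEnum
    from cg.services.orders.validation.models.sample import Sample

    sample = Sample.model_validate(
        {"application": "ADM1", "container": ContainerEnum.tube, "name": "sample1"}
    )
    assert sample.container_name == "sample1"  # defaulted to the sample name
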
diff --git a/cg/services/orders/validation/models/sample_aliases.py b/cg/services/orders/validation/models/sample_aliases.py
new file mode 100644
index 0000000000..f89c26df76
--- /dev/null
+++ b/cg/services/orders/validation/models/sample_aliases.py
@@ -0,0 +1,23 @@
+from cg.services.orders.validation.workflows.balsamic.models.sample import BalsamicSample
+from cg.services.orders.validation.workflows.balsamic_umi.models.sample import BalsamicUmiSample
+from cg.services.orders.validation.workflows.fastq.models.sample import FastqSample
+from cg.services.orders.validation.workflows.fluffy.models.sample import FluffySample
+from cg.services.orders.validation.workflows.mip_dna.models.sample import MipDnaSample
+from cg.services.orders.validation.workflows.mip_rna.models.sample import MipRnaSample
+from cg.services.orders.validation.workflows.rml.models.sample import RmlSample
+from cg.services.orders.validation.workflows.rna_fusion.models.sample import RnaFusionSample
+from cg.services.orders.validation.workflows.tomte.models.sample import TomteSample
+
+HumanSample = (
+ BalsamicSample | BalsamicUmiSample | FastqSample | MipDnaSample | RnaFusionSample | TomteSample
+)
+
+IndexedSample = FluffySample | RmlSample
+
+SampleInCase = (
+ BalsamicSample | BalsamicUmiSample | MipDnaSample | MipRnaSample | RnaFusionSample | TomteSample
+)
+
+SampleWithRelatives = TomteSample | MipDnaSample
+
+SampleWithSkipRC = TomteSample | MipDnaSample | FastqSample
diff --git a/cg/services/orders/validation/models/utils.py b/cg/services/orders/validation/models/utils.py
new file mode 100644
index 0000000000..1d8f27404c
--- /dev/null
+++ b/cg/services/orders/validation/models/utils.py
@@ -0,0 +1,2 @@
+def set_null_to_false(value: bool | None) -> bool:
+    """Coerce a missing (None) value to False; booleans pass through unchanged."""
+    return value or False
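
A small sketch of how this helper is wired in as a BeforeValidator, mirroring Order.skip_reception_control (_Flagged is a throwaway model):

    from typing import Annotated

    from pydantic import BaseModel, BeforeValidator

    from cg.services.orders.validation.models.utils import set_null_to_false

    class _Flagged(BaseModel):
        skip_reception_control: Annotated[bool, BeforeValidator(set_null_to_false)] = False

    assert _Flagged.model_validate({"skip_reception_control": None}).skip_reception_control is False
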
diff --git a/cg/services/orders/validation/order_type_maps.py b/cg/services/orders/validation/order_type_maps.py
new file mode 100644
index 0000000000..dd2a31379b
--- /dev/null
+++ b/cg/services/orders/validation/order_type_maps.py
@@ -0,0 +1,143 @@
+from pydantic import BaseModel, ConfigDict
+
+from cg.models.orders.constants import OrderType
+from cg.services.orders.validation.models.order import Order
+from cg.services.orders.validation.workflows.balsamic.models.order import BalsamicOrder
+from cg.services.orders.validation.workflows.balsamic.validation_rules import (
+ BALSAMIC_CASE_RULES,
+ BALSAMIC_CASE_SAMPLE_RULES,
+)
+from cg.services.orders.validation.workflows.balsamic_umi.models.order import BalsamicUmiOrder
+from cg.services.orders.validation.workflows.balsamic_umi.validation_rules import (
+ BALSAMIC_UMI_CASE_RULES,
+ BALSAMIC_UMI_CASE_SAMPLE_RULES,
+)
+from cg.services.orders.validation.workflows.fastq.models.order import FastqOrder
+from cg.services.orders.validation.workflows.fastq.validation_rules import FASTQ_SAMPLE_RULES
+from cg.services.orders.validation.workflows.fluffy.models.order import FluffyOrder
+from cg.services.orders.validation.workflows.fluffy.validation_rules import FLUFFY_SAMPLE_RULES
+from cg.services.orders.validation.workflows.metagenome.models.order import MetagenomeOrder
+from cg.services.orders.validation.workflows.metagenome.validation_rules import (
+ METAGENOME_SAMPLE_RULES,
+)
+from cg.services.orders.validation.workflows.microbial_fastq.models.order import MicrobialFastqOrder
+from cg.services.orders.validation.workflows.microbial_fastq.validation_rules import (
+ MICROBIAL_FASTQ_SAMPLE_RULES,
+)
+from cg.services.orders.validation.workflows.microsalt.models.order import MicrosaltOrder
+from cg.services.orders.validation.workflows.microsalt.validation_rules import (
+ MICROSALT_SAMPLE_RULES,
+)
+from cg.services.orders.validation.workflows.mip_dna.models.order import MipDnaOrder
+from cg.services.orders.validation.workflows.mip_dna.validation_rules import (
+ MIP_DNA_CASE_RULES,
+ MIP_DNA_CASE_SAMPLE_RULES,
+)
+from cg.services.orders.validation.workflows.mip_rna.models.order import MipRnaOrder
+from cg.services.orders.validation.workflows.mip_rna.validation_rules import (
+ MIP_RNA_CASE_RULES,
+ MIP_RNA_CASE_SAMPLE_RULES,
+)
+from cg.services.orders.validation.workflows.mutant.models.order import MutantOrder
+from cg.services.orders.validation.workflows.mutant.validation_rules import MUTANT_SAMPLE_RULES
+from cg.services.orders.validation.workflows.order_validation_rules import ORDER_RULES
+from cg.services.orders.validation.workflows.pacbio_long_read.models.order import PacbioOrder
+from cg.services.orders.validation.workflows.pacbio_long_read.validation_rules import (
+ PACBIO_LONG_READ_SAMPLE_RULES,
+)
+from cg.services.orders.validation.workflows.rml.models.order import RmlOrder
+from cg.services.orders.validation.workflows.rml.validation_rules import RML_SAMPLE_RULES
+from cg.services.orders.validation.workflows.rna_fusion.models.order import RnaFusionOrder
+from cg.services.orders.validation.workflows.rna_fusion.validation_rules import (
+ RNAFUSION_CASE_RULES,
+ RNAFUSION_CASE_SAMPLE_RULES,
+)
+from cg.services.orders.validation.workflows.taxprofiler.models.order import TaxprofilerOrder
+from cg.services.orders.validation.workflows.taxprofiler.validation_rules import (
+ TAXPROFILER_SAMPLE_RULES,
+)
+from cg.services.orders.validation.workflows.tomte.models.order import TomteOrder
+from cg.services.orders.validation.workflows.tomte.validation_rules import (
+ TOMTE_CASE_RULES,
+ TOMTE_CASE_SAMPLE_RULES,
+)
+
+
+class RuleSet(BaseModel):
+ case_rules: list[callable] = []
+ case_sample_rules: list[callable] = []
+ order_rules: list[callable] = ORDER_RULES
+ sample_rules: list[callable] = []
+
+ model_config = ConfigDict(arbitrary_types_allowed=True)
+
+
+ORDER_TYPE_RULE_SET_MAP: dict[OrderType, RuleSet] = {
+ OrderType.BALSAMIC: RuleSet(
+ case_rules=BALSAMIC_CASE_RULES, case_sample_rules=BALSAMIC_CASE_SAMPLE_RULES
+ ),
+ OrderType.BALSAMIC_UMI: RuleSet(
+ case_rules=BALSAMIC_UMI_CASE_RULES,
+ case_sample_rules=BALSAMIC_UMI_CASE_SAMPLE_RULES,
+ ),
+ OrderType.FASTQ: RuleSet(
+ sample_rules=FASTQ_SAMPLE_RULES,
+ ),
+ OrderType.FLUFFY: RuleSet(
+ sample_rules=FLUFFY_SAMPLE_RULES,
+ ),
+ OrderType.METAGENOME: RuleSet(
+ sample_rules=METAGENOME_SAMPLE_RULES,
+ ),
+ OrderType.MICROBIAL_FASTQ: RuleSet(
+ sample_rules=MICROBIAL_FASTQ_SAMPLE_RULES,
+ ),
+ OrderType.MICROSALT: RuleSet(
+ sample_rules=MICROSALT_SAMPLE_RULES,
+ ),
+ OrderType.MIP_DNA: RuleSet(
+ case_rules=MIP_DNA_CASE_RULES, case_sample_rules=MIP_DNA_CASE_SAMPLE_RULES
+ ),
+ OrderType.MIP_RNA: RuleSet(
+ case_rules=MIP_RNA_CASE_RULES,
+ case_sample_rules=MIP_RNA_CASE_SAMPLE_RULES,
+ ),
+ OrderType.PACBIO_LONG_READ: RuleSet(
+ sample_rules=PACBIO_LONG_READ_SAMPLE_RULES,
+ ),
+ OrderType.RML: RuleSet(
+ sample_rules=RML_SAMPLE_RULES,
+ ),
+ OrderType.RNAFUSION: RuleSet(
+ case_rules=RNAFUSION_CASE_RULES,
+ case_sample_rules=RNAFUSION_CASE_SAMPLE_RULES,
+ ),
+ OrderType.SARS_COV_2: RuleSet(
+ sample_rules=MUTANT_SAMPLE_RULES,
+ ),
+ OrderType.TAXPROFILER: RuleSet(
+ sample_rules=TAXPROFILER_SAMPLE_RULES,
+ ),
+ OrderType.TOMTE: RuleSet(
+ case_rules=TOMTE_CASE_RULES,
+ case_sample_rules=TOMTE_CASE_SAMPLE_RULES,
+ ),
+}
+
+ORDER_TYPE_MODEL_MAP: dict[OrderType, type[Order]] = {
+ OrderType.BALSAMIC: BalsamicOrder,
+ OrderType.BALSAMIC_UMI: BalsamicUmiOrder,
+ OrderType.FASTQ: FastqOrder,
+ OrderType.FLUFFY: FluffyOrder,
+ OrderType.METAGENOME: MetagenomeOrder,
+ OrderType.MICROBIAL_FASTQ: MicrobialFastqOrder,
+ OrderType.MICROSALT: MicrosaltOrder,
+ OrderType.MIP_DNA: MipDnaOrder,
+ OrderType.MIP_RNA: MipRnaOrder,
+ OrderType.PACBIO_LONG_READ: PacbioOrder,
+ OrderType.RML: RmlOrder,
+ OrderType.RNAFUSION: RnaFusionOrder,
+ OrderType.SARS_COV_2: MutantOrder,
+ OrderType.TAXPROFILER: TaxprofilerOrder,
+ OrderType.TOMTE: TomteOrder,
+}
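
A hedged sketch of how the two maps might be consumed together; validate_order_payload is hypothetical, and the real service wiring is outside this diff:

    from cg.models.orders.constants import OrderType
    from cg.services.orders.validation.errors.validation_errors import ValidationErrors
    from cg.services.orders.validation.model_validator.model_validator import ModelValidator
    from cg.services.orders.validation.order_type_maps import (
        ORDER_TYPE_MODEL_MAP,
        ORDER_TYPE_RULE_SET_MAP,
    )

    def validate_order_payload(raw_order: dict, order_type: OrderType) -> ValidationErrors:
        # Phase 1: parse with the order-type-specific Pydantic model.
        order, errors = ModelValidator.validate(
            order=raw_order, model=ORDER_TYPE_MODEL_MAP[order_type]
        )
        if order is None:  # parsing failures short-circuit before any rules run
            return errors
        # Phase 2: the matching RuleSet would be applied here; its rules take the
        # parsed order plus e.g. a Store handle (note the **kwargs in the rule signatures).
        rule_set = ORDER_TYPE_RULE_SET_MAP[order_type]
        ...
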
diff --git a/cg/services/orders/validation/response_mapper.py b/cg/services/orders/validation/response_mapper.py
new file mode 100644
index 0000000000..48c6537101
--- /dev/null
+++ b/cg/services/orders/validation/response_mapper.py
@@ -0,0 +1,103 @@
+from typing import Any
+
+from cg.services.orders.validation.errors.case_errors import CaseError
+from cg.services.orders.validation.errors.case_sample_errors import CaseSampleError
+from cg.services.orders.validation.errors.order_errors import OrderError
+from cg.services.orders.validation.errors.sample_errors import SampleError
+from cg.services.orders.validation.errors.validation_errors import ValidationErrors
+
+
+def create_order_validation_response(raw_order: dict, errors: ValidationErrors) -> dict:
+ """Ensures each field in the order looks like: {value: raw value, errors: [errors]}"""
+ wrap_fields(raw_order)
+ map_errors_to_order(order=raw_order, errors=errors)
+ return raw_order
+
+
+def map_errors_to_order(order: dict, errors: ValidationErrors) -> None:
+ map_order_errors(order=order, errors=errors.order_errors)
+ map_case_errors(order=order, errors=errors.case_errors)
+ map_case_sample_errors(order=order, errors=errors.case_sample_errors)
+ map_sample_errors(order=order, errors=errors.sample_errors)
+
+
+def map_order_errors(order: dict, errors: list[OrderError]) -> None:
+ for error in errors:
+ add_error(entity=order, field=error.field, message=error.message)
+
+
+def map_case_errors(order: dict, errors: list[CaseError]) -> None:
+ for error in errors:
+ case: dict = get_case(order=order, index=error.case_index)
+ add_error(entity=case, field=error.field, message=error.message)
+
+
+def map_case_sample_errors(order: dict, errors: list[CaseSampleError]) -> None:
+ for error in errors:
+ case: dict = get_case(order=order, index=error.case_index)
+ sample: dict = get_case_sample(case=case, index=error.sample_index)
+ add_error(entity=sample, field=error.field, message=error.message)
+
+
+def map_sample_errors(order: dict, errors: list[SampleError]) -> None:
+ for error in errors:
+ sample: dict = get_sample(order=order, index=error.sample_index)
+ add_error(entity=sample, field=error.field, message=error.message)
+
+
+def add_error(entity: dict, field: str, message: str) -> None:
+ if not entity.get(field):
+ set_field(entity=entity, field=field, value=None)
+ if field == "sample_errors":
+        # Special handling for the case-level 'sample_errors' pseudo-field: its 'value' flags whether any sample in the case has errors
+ entity[field]["value"] = True
+ entity[field]["errors"].append(message)
+
+
+def get_case(order: dict, index: int) -> dict:
+ return order["cases"][index]
+
+
+def get_case_sample(case: dict, index: int) -> dict:
+ return case["samples"][index]
+
+
+def get_sample(order: dict, index: int) -> dict:
+ return order["samples"][index]
+
+
+def wrap_fields(raw_order: dict) -> None:
+ wrap_order_fields(raw_order)
+ if raw_order.get("cases"):
+ wrap_case_and_sample_fields(raw_order)
+ else:
+ wrap_sample_fields(raw_order["samples"])
+
+
+def wrap_order_fields(raw_order: dict) -> None:
+ for field, value in raw_order.items():
+ if field not in {"cases", "samples"}:
+ set_field(entity=raw_order, field=field, value=value)
+
+
+def wrap_case_and_sample_fields(raw_order: dict) -> None:
+ for case in raw_order["cases"]:
+ wrap_case_fields(case)
+ wrap_sample_fields(case["samples"])
+
+
+def wrap_case_fields(case: dict) -> None:
+ for field, value in case.items():
+ if field != "samples":
+ set_field(entity=case, field=field, value=value)
+ set_field(entity=case, field="sample_errors", value=False)
+
+
+def wrap_sample_fields(samples: list[dict]) -> None:
+ for sample in samples:
+ for field, value in sample.items():
+ set_field(entity=sample, field=field, value=value)
+
+
+def set_field(entity: dict, field: str, value: Any) -> None:
+ entity[field] = {"value": value, "errors": []}
diff --git a/cg/services/orders/validation/rules/__init__.py b/cg/services/orders/validation/rules/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/validation/rules/case/__init__.py b/cg/services/orders/validation/rules/case/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/validation/rules/case/rules.py b/cg/services/orders/validation/rules/case/rules.py
new file mode 100644
index 0000000000..897648fd35
--- /dev/null
+++ b/cg/services/orders/validation/rules/case/rules.py
@@ -0,0 +1,125 @@
+from cg.services.orders.validation.errors.case_errors import (
+ CaseDoesNotExistError,
+ CaseNameNotAvailableError,
+ CaseOutsideOfCollaborationError,
+ DoubleNormalError,
+ DoubleTumourError,
+ MoreThanTwoSamplesInCaseError,
+ MultipleSamplesInCaseError,
+ NumberOfNormalSamplesError,
+ RepeatedCaseNameError,
+ RepeatedGenePanelsError,
+)
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.rules.case.utils import (
+ contains_duplicates,
+ is_case_not_from_collaboration,
+ is_double_normal,
+ is_double_tumour,
+)
+from cg.services.orders.validation.rules.case_sample.utils import get_repeated_case_name_errors
+from cg.services.orders.validation.workflows.balsamic.models.order import BalsamicOrder
+from cg.services.orders.validation.workflows.balsamic_umi.models.order import BalsamicUmiOrder
+from cg.store.store import Store
+
+
+def validate_gene_panels_unique(order: OrderWithCases, **kwargs) -> list[RepeatedGenePanelsError]:
+ errors: list[RepeatedGenePanelsError] = []
+ for case_index, case in order.enumerated_new_cases:
+ if contains_duplicates(case.panels):
+ error = RepeatedGenePanelsError(case_index=case_index)
+ errors.append(error)
+ return errors
+
+
+def validate_case_names_available(
+ order: OrderWithCases,
+ store: Store,
+ **kwargs,
+) -> list[CaseNameNotAvailableError]:
+ errors: list[CaseNameNotAvailableError] = []
+ customer = store.get_customer_by_internal_id(order.customer)
+ for case_index, case in order.enumerated_new_cases:
+ if store.get_case_by_name_and_customer(case_name=case.name, customer=customer):
+ error = CaseNameNotAvailableError(case_index=case_index)
+ errors.append(error)
+ return errors
+
+
+def validate_case_internal_ids_exist(
+ order: OrderWithCases,
+ store: Store,
+ **kwargs,
+) -> list[CaseDoesNotExistError]:
+ errors: list[CaseDoesNotExistError] = []
+ for case_index, case in order.enumerated_existing_cases:
+        if not store.get_case_by_internal_id(case.internal_id):
+ error = CaseDoesNotExistError(case_index=case_index)
+ errors.append(error)
+ return errors
+
+
+def validate_existing_cases_belong_to_collaboration(
+ order: OrderWithCases,
+ store: Store,
+ **kwargs,
+) -> list[CaseOutsideOfCollaborationError]:
+ """Validates that all existing cases within the order belong to a customer
+ within the order's customer's collaboration."""
+ errors: list[CaseOutsideOfCollaborationError] = []
+ for case_index, case in order.enumerated_existing_cases:
+ if is_case_not_from_collaboration(case=case, customer_id=order.customer, store=store):
+ error = CaseOutsideOfCollaborationError(case_index=case_index)
+ errors.append(error)
+ return errors
+
+
+def validate_case_names_not_repeated(
+ order: OrderWithCases,
+ **kwargs,
+) -> list[RepeatedCaseNameError]:
+ return get_repeated_case_name_errors(order)
+
+
+def validate_one_sample_per_case(
+ order: OrderWithCases, **kwargs
+) -> list[MultipleSamplesInCaseError]:
+ """Validates that there is only one sample in each case.
+ Only applicable to RNAFusion."""
+ errors: list[MultipleSamplesInCaseError] = []
+ for case_index, case in order.enumerated_new_cases:
+ if len(case.samples) > 1:
+ error = MultipleSamplesInCaseError(case_index=case_index)
+ errors.append(error)
+ return errors
+
+
+def validate_at_most_two_samples_per_case(
+ order: OrderWithCases, **kwargs
+) -> list[MoreThanTwoSamplesInCaseError]:
+ """Validates that there is at most two samples in each case.
+ Only applicable to Balsamic and Balsamic-UMI."""
+ errors: list[MoreThanTwoSamplesInCaseError] = []
+ for case_index, case in order.enumerated_new_cases:
+ if len(case.samples) > 2:
+ error = MoreThanTwoSamplesInCaseError(case_index=case_index)
+ errors.append(error)
+ return errors
+
+
+def validate_number_of_normal_samples(
+ order: BalsamicOrder | BalsamicUmiOrder, store: Store, **kwargs
+) -> list[NumberOfNormalSamplesError]:
+ """Validates that Balsamic cases with pairs of samples contain one tumour and one normal sample.
+ Only applicable to Balsamic and Balsamic-UMI."""
+ errors: list[NumberOfNormalSamplesError] = []
+ for case_index, case in order.enumerated_new_cases:
+ if is_double_normal(case=case, store=store):
+ error = DoubleNormalError(case_index=case_index)
+ errors.append(error)
+ elif is_double_tumour(case=case, store=store):
+ error = DoubleTumourError(case_index=case_index)
+ errors.append(error)
+ return errors
diff --git a/cg/services/orders/validation/rules/case/utils.py b/cg/services/orders/validation/rules/case/utils.py
new file mode 100644
index 0000000000..350082f884
--- /dev/null
+++ b/cg/services/orders/validation/rules/case/utils.py
@@ -0,0 +1,36 @@
+from cg.services.orders.validation.models.existing_case import ExistingCase
+from cg.services.orders.validation.workflows.balsamic.models.case import BalsamicCase
+from cg.services.orders.validation.workflows.balsamic_umi.models.case import BalsamicUmiCase
+from cg.store.models import Case as DbCase
+from cg.store.models import Customer, Sample
+from cg.store.store import Store
+
+
+def contains_duplicates(input_list: list) -> bool:
+ return len(set(input_list)) != len(input_list)
+
+
+def is_double_tumour(case: BalsamicCase | BalsamicUmiCase, store: Store) -> bool:
+ return len(case.samples) == 2 and get_number_of_tumours(case=case, store=store) == 2
+
+
+def is_double_normal(case: BalsamicCase | BalsamicUmiCase, store: Store) -> bool:
+ return len(case.samples) == 2 and get_number_of_tumours(case=case, store=store) == 0
+
+
+def get_number_of_tumours(case: BalsamicCase | BalsamicUmiCase, store: Store) -> int:
+ number_of_tumours = 0
+ for sample in case.samples:
+ if sample.is_new and sample.tumour:
+ number_of_tumours += 1
+ elif not sample.is_new:
+ db_sample: Sample = store.get_sample_by_internal_id(sample.internal_id)
+ if db_sample.is_tumour:
+ number_of_tumours += 1
+ return number_of_tumours
+
+
+def is_case_not_from_collaboration(case: ExistingCase, customer_id: str, store: Store) -> bool:
+ db_case: DbCase | None = store.get_case_by_internal_id(case.internal_id)
+ customer: Customer | None = store.get_customer_by_internal_id(customer_id)
+    return bool(db_case and customer and db_case.customer not in customer.collaborators)
diff --git a/cg/services/orders/validation/rules/case_sample/__init__.py b/cg/services/orders/validation/rules/case_sample/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/validation/rules/case_sample/pedigree/__init__.py b/cg/services/orders/validation/rules/case_sample/pedigree/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/validation/rules/case_sample/pedigree/models.py b/cg/services/orders/validation/rules/case_sample/pedigree/models.py
new file mode 100644
index 0000000000..e4a3129e62
--- /dev/null
+++ b/cg/services/orders/validation/rules/case_sample/pedigree/models.py
@@ -0,0 +1,79 @@
+from cg.services.orders.validation.models.case_aliases import CaseContainingRelatives
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.workflows.mip_dna.models.sample import MipDnaSample
+from cg.services.orders.validation.workflows.tomte.models.sample import TomteSample
+from cg.store.store import Store
+
+SampleWithParents = TomteSample | MipDnaSample | ExistingSample
+
+
+class Node:
+ """
+ This class is used to represent the samples in the family tree graph. The variables 'mother' and
+    'father' refer to other nodes in the family tree, and can be thought of as edges in the graph.
+ Because the 'mother' and 'father' are tracked using the sample's _name_ in the order, and
+ because said name is not set in the ExistingSample model, we require the sample name as a
+ separate input.
+ """
+
+ def __init__(
+ self,
+ sample: SampleWithParents,
+ case_index: int,
+ sample_index: int,
+ sample_name: str,
+ ):
+ self.sample: SampleWithParents = sample
+ self.sample_name: str = sample_name
+ self.sample_index: int = sample_index
+ self.case_index: int = case_index
+ self.father: Node | None = None
+ self.mother: Node | None = None
+ self.visited = False
+ self.in_current_path = False
+
+
+class FamilyTree:
+ """
+ This class is a directed graph representing a family tree from a submitted order with specified
+ mothers and fathers. Each node represents a sample, and each node has a property 'mother' and
+ a property 'father' referring to other nodes in the graph. These may be thought of as the
+ graph's edges.
+ """
+
+ def __init__(self, case: CaseContainingRelatives, case_index: int, store: Store):
+ self.graph: dict[str, Node] = {}
+ self.case: CaseContainingRelatives = case
+ self.case_index: int = case_index
+ self.store = store
+ self._add_nodes()
+ self._add_edges()
+
+ def _add_nodes(self) -> None:
+ """Add a node to the graph for each sample in the graph. For existing samples, the name
+ is fetched from StatusDB."""
+ for sample_index, sample in self.case.enumerated_samples:
+ if sample.is_new:
+ sample_name = sample.name
+ else:
+ sample_name = self.store.get_sample_by_internal_id(sample.internal_id).name
+ node = Node(
+ sample=sample,
+ sample_index=sample_index,
+ case_index=self.case_index,
+ sample_name=sample_name,
+ )
+ self.graph[sample_name] = node
+
+ def _add_edges(self) -> None:
+ """Add edges to the graph by populating each node's 'mother' and 'father' property."""
+ for node in self.graph.values():
+ sample: SampleWithParents = node.sample
+ if sample.mother:
+ node.mother = self.graph.get(sample.mother)
+ if sample.father:
+ node.father = self.graph.get(sample.father)
+
+ @property
+ def nodes(self) -> list[Node]:
+ return list(self.graph.values())
diff --git a/cg/services/orders/validation/rules/case_sample/pedigree/utils.py b/cg/services/orders/validation/rules/case_sample/pedigree/utils.py
new file mode 100644
index 0000000000..51b42a7c97
--- /dev/null
+++ b/cg/services/orders/validation/rules/case_sample/pedigree/utils.py
@@ -0,0 +1,64 @@
+from cg.constants.pedigree import Pedigree
+from cg.services.orders.validation.errors.case_sample_errors import (
+ DescendantAsFatherError,
+ DescendantAsMotherError,
+ PedigreeError,
+ SampleIsOwnFatherError,
+ SampleIsOwnMotherError,
+)
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.rules.case_sample.pedigree.models import FamilyTree, Node
+from cg.services.orders.validation.workflows.mip_dna.models.sample import MipDnaSample
+from cg.services.orders.validation.workflows.tomte.models.sample import TomteSample
+
+
+def validate_tree(pedigree: FamilyTree) -> list[PedigreeError]:
+ """This performs a DFS algorithm on the family tree to find any cycles, which indicates an
+ order error."""
+ errors: list[PedigreeError] = []
+ for node in pedigree.nodes:
+ if not node.visited:
+ detect_cycles(node=node, errors=errors)
+ return errors
+
+
+def detect_cycles(node: Node, errors: list[PedigreeError]) -> None:
+ """Detect cycles in the pedigree graph using depth-first search. If a cycle is detected,
+ this is considered an error."""
+ node.visited = True
+ node.in_current_path = True
+
+ parents: dict[str, Node] = {Pedigree.MOTHER: node.mother, Pedigree.FATHER: node.father}
+
+ for parent_type, parent in parents.items():
+ if parent and parent.in_current_path:
+ error: PedigreeError = get_error(node=node, parent_type=parent_type)
+ errors.append(error)
+ elif parent and not parent.visited:
+ detect_cycles(node=parent, errors=errors)
+ node.in_current_path = False
+
+
+def get_error(node: Node, parent_type: str) -> PedigreeError:
+ if parent_type == Pedigree.MOTHER:
+ return get_mother_error(node)
+ if parent_type == Pedigree.FATHER:
+ return get_father_error(node)
+
+
+def get_mother_error(node: Node) -> PedigreeError:
+ """Called when the node's 'mother' creates a cycle in the family tree. For clearer feedback
+ we distinguish between the sample being its own mother, and other more complex situations."""
+ sample: TomteSample | MipDnaSample | ExistingSample = node.sample
+ if node.sample_name == sample.mother:
+ return SampleIsOwnMotherError(sample_index=node.sample_index, case_index=node.case_index)
+ return DescendantAsMotherError(sample_index=node.sample_index, case_index=node.case_index)
+
+
+def get_father_error(node: Node) -> PedigreeError:
+ """Called when the node's 'father' creates a cycle in the family tree. For clearer feedback
+ we distinguish between the sample being its own father, and other more complex situations."""
+    sample: TomteSample | MipDnaSample | ExistingSample = node.sample
+ if node.sample_name == sample.father:
+ return SampleIsOwnFatherError(sample_index=node.sample_index, case_index=node.case_index)
+ return DescendantAsFatherError(sample_index=node.sample_index, case_index=node.case_index)
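
A self-contained toy version of the same DFS cycle check (ToyNode stands in for the Node class from the models module; the cycle below is the classic "sample listed as its own grandmother"):

    class ToyNode:
        def __init__(self, name: str):
            self.name = name
            self.mother: "ToyNode | None" = None
            self.father: "ToyNode | None" = None
            self.visited = False
            self.in_current_path = False

    def has_cycle(node: ToyNode) -> bool:
        node.visited = True
        node.in_current_path = True
        for parent in (node.mother, node.father):
            if parent and parent.in_current_path:
                return True  # a parent is also a descendant: cycle
            if parent and not parent.visited and has_cycle(parent):
                return True
        node.in_current_path = False
        return False

    child, mother = ToyNode("child"), ToyNode("mother")
    child.mother, mother.mother = mother, child  # the child is its mother's mother
    assert has_cycle(child)
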
diff --git a/cg/services/orders/validation/rules/case_sample/pedigree/validate_pedigree.py b/cg/services/orders/validation/rules/case_sample/pedigree/validate_pedigree.py
new file mode 100644
index 0000000000..6918d1bc41
--- /dev/null
+++ b/cg/services/orders/validation/rules/case_sample/pedigree/validate_pedigree.py
@@ -0,0 +1,14 @@
+from cg.services.orders.validation.errors.case_sample_errors import PedigreeError
+from cg.services.orders.validation.rules.case_sample.pedigree.models import FamilyTree
+from cg.services.orders.validation.rules.case_sample.pedigree.utils import validate_tree
+from cg.services.orders.validation.workflows.mip_dna.models.case import MipDnaCase
+from cg.services.orders.validation.workflows.tomte.models.case import TomteCase
+from cg.store.store import Store
+
+
+def get_pedigree_errors(
+ case: TomteCase | MipDnaCase, case_index: int, store: Store
+) -> list[PedigreeError]:
+ """Return a list of errors if any sample is labelled as its own ancestor in the family tree."""
+ pedigree = FamilyTree(case=case, case_index=case_index, store=store)
+ return validate_tree(pedigree)
diff --git a/cg/services/orders/validation/rules/case_sample/rules.py b/cg/services/orders/validation/rules/case_sample/rules.py
new file mode 100644
index 0000000000..e0c7a79b06
--- /dev/null
+++ b/cg/services/orders/validation/rules/case_sample/rules.py
@@ -0,0 +1,482 @@
+from collections import Counter
+
+from cg.models.orders.constants import OrderType
+from cg.services.orders.validation.constants import ALLOWED_SKIP_RC_BUFFERS
+from cg.services.orders.validation.errors.case_errors import InvalidGenePanelsError
+from cg.services.orders.validation.errors.case_sample_errors import (
+ ApplicationArchivedError,
+ ApplicationNotCompatibleError,
+ ApplicationNotValidError,
+ BufferMissingError,
+ CaptureKitMissingError,
+ ConcentrationRequiredIfSkipRCError,
+ ContainerNameMissingError,
+ ContainerNameRepeatedError,
+ FatherNotInCaseError,
+ InvalidBufferError,
+ InvalidConcentrationIfSkipRCError,
+ InvalidFatherSexError,
+ InvalidMotherSexError,
+ InvalidVolumeError,
+ MotherNotInCaseError,
+ OccupiedWellError,
+ PedigreeError,
+ SampleDoesNotExistError,
+ SampleNameRepeatedError,
+ SampleNameSameAsCaseNameError,
+ SampleOutsideOfCollaborationError,
+ SexSubjectIdError,
+ StatusUnknownError,
+ SubjectIdSameAsCaseNameError,
+ SubjectIdSameAsSampleNameError,
+ VolumeRequiredError,
+ WellFormatError,
+ WellPositionMissingError,
+)
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.models.sample_aliases import SampleInCase
+from cg.services.orders.validation.rules.case_sample.pedigree.validate_pedigree import (
+ get_pedigree_errors,
+)
+from cg.services.orders.validation.rules.case_sample.utils import (
+ are_all_samples_unknown,
+ get_counter_container_names,
+ get_existing_case_names,
+ get_existing_sample_names,
+ get_father_case_errors,
+ get_father_sex_errors,
+ get_invalid_panels,
+ get_mother_case_errors,
+ get_mother_sex_errors,
+ get_occupied_well_errors,
+ get_well_sample_map,
+ has_sex_and_subject,
+ is_buffer_missing,
+ is_concentration_missing,
+ is_container_name_missing,
+ is_invalid_plate_well_format,
+ is_sample_missing_capture_kit,
+ is_sample_not_from_collaboration,
+ is_sample_tube_name_reused,
+ is_well_position_missing,
+ validate_concentration_in_case,
+ validate_subject_ids_in_case,
+)
+from cg.services.orders.validation.rules.utils import (
+ is_application_compatible,
+ is_volume_invalid,
+ is_volume_missing,
+)
+from cg.services.orders.validation.workflows.balsamic.models.order import BalsamicOrder
+from cg.services.orders.validation.workflows.balsamic_umi.models.order import BalsamicUmiOrder
+from cg.store.models import Sample as DbSample
+from cg.store.store import Store
+
+
+def validate_application_compatibility(
+ order: OrderWithCases,
+ store: Store,
+ **kwargs,
+) -> list[ApplicationNotCompatibleError]:
+ errors: list[ApplicationNotCompatibleError] = []
+ order_type: OrderType = order.order_type
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if not is_application_compatible(
+ order_type=order_type,
+ application_tag=sample.application,
+ store=store,
+ ):
+ error = ApplicationNotCompatibleError(
+ case_index=case_index,
+ sample_index=sample_index,
+ )
+ errors.append(error)
+ return errors
+
+
+def validate_buffer_skip_rc_condition(order: OrderWithCases, **kwargs) -> list[InvalidBufferError]:
+ errors: list[InvalidBufferError] = []
+ if order.skip_reception_control:
+ errors.extend(validate_buffers_are_allowed(order))
+ return errors
+
+
+def validate_buffers_are_allowed(order: OrderWithCases, **kwargs) -> list[InvalidBufferError]:
+ errors: list[InvalidBufferError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if sample.elution_buffer not in ALLOWED_SKIP_RC_BUFFERS:
+ error = InvalidBufferError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_concentration_required_if_skip_rc(
+ order: OrderWithCases, **kwargs
+) -> list[ConcentrationRequiredIfSkipRCError]:
+ if not order.skip_reception_control:
+ return []
+ errors: list[ConcentrationRequiredIfSkipRCError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if is_concentration_missing(sample):
+ error = ConcentrationRequiredIfSkipRCError(
+ case_index=case_index,
+ sample_index=sample_index,
+ )
+ errors.append(error)
+ return errors
+
+
+def validate_subject_ids_different_from_sample_names(
+ order: OrderWithCases, **kwargs
+) -> list[SubjectIdSameAsSampleNameError]:
+ errors: list[SubjectIdSameAsSampleNameError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if sample.name == sample.subject_id:
+ error = SubjectIdSameAsSampleNameError(
+ case_index=case_index,
+ sample_index=sample_index,
+ )
+ errors.append(error)
+ return errors
+
+
+def validate_well_positions_required(
+ order: OrderWithCases, **kwargs
+) -> list[WellPositionMissingError]:
+ errors: list[WellPositionMissingError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if is_well_position_missing(sample):
+ error = WellPositionMissingError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_container_name_required(
+ order: OrderWithCases, **kwargs
+) -> list[ContainerNameMissingError]:
+ errors: list[ContainerNameMissingError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if is_container_name_missing(sample):
+ error = ContainerNameMissingError(
+ case_index=case_index,
+ sample_index=sample_index,
+ )
+ errors.append(error)
+ return errors
+
+
+def validate_application_exists(
+ order: OrderWithCases,
+ store: Store,
+ **kwargs,
+) -> list[ApplicationNotValidError]:
+ errors: list[ApplicationNotValidError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if not store.get_application_by_tag(sample.application):
+ error = ApplicationNotValidError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_application_not_archived(
+ order: OrderWithCases,
+ store: Store,
+ **kwargs,
+) -> list[ApplicationArchivedError]:
+ errors: list[ApplicationArchivedError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if store.is_application_archived(sample.application):
+ error = ApplicationArchivedError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_gene_panels_exist(
+ order: OrderWithCases,
+ store: Store,
+ **kwargs,
+) -> list[InvalidGenePanelsError]:
+ errors: list[InvalidGenePanelsError] = []
+ for case_index, case in order.enumerated_new_cases:
+ if invalid_panels := get_invalid_panels(panels=case.panels, store=store):
+ case_error = InvalidGenePanelsError(case_index=case_index, panels=invalid_panels)
+ errors.append(case_error)
+ return errors
+
+
+def validate_volume_interval(order: OrderWithCases, **kwargs) -> list[InvalidVolumeError]:
+ errors: list[InvalidVolumeError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if is_volume_invalid(sample):
+ error = InvalidVolumeError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_volume_required(order: OrderWithCases, **kwargs) -> list[VolumeRequiredError]:
+ errors: list[VolumeRequiredError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if is_volume_missing(sample):
+ error = VolumeRequiredError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_samples_exist(
+ order: OrderWithCases,
+ store: Store,
+ **kwargs,
+) -> list[SampleDoesNotExistError]:
+ errors: list[SampleDoesNotExistError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_existing_samples:
+ db_sample: DbSample | None = store.get_sample_by_internal_id(sample.internal_id)
+ if not db_sample:
+ error = SampleDoesNotExistError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_wells_contain_at_most_one_sample(
+ order: OrderWithCases, **kwargs
+) -> list[OccupiedWellError]:
+ errors: list[OccupiedWellError] = []
+ well_position_to_sample_map: dict[tuple[str, str], list[tuple[int, int]]] = get_well_sample_map(
+ order
+ )
+ for indices in well_position_to_sample_map.values():
+ if len(indices) > 1:
+ well_errors = get_occupied_well_errors(indices[1:])
+ errors.extend(well_errors)
+ return errors
+
+
+def validate_sample_names_not_repeated(
+ order: OrderWithCases, store: Store, **kwargs
+) -> list[SampleNameRepeatedError]:
+ old_sample_names: set[str] = get_existing_sample_names(order=order, status_db=store)
+ new_samples: list[tuple[int, int, SampleInCase]] = order.enumerated_new_samples
+ sample_name_counter = Counter([sample.name for _, _, sample in new_samples])
+ return [
+ SampleNameRepeatedError(case_index=case_index, sample_index=sample_index)
+ for case_index, sample_index, sample in new_samples
+ if sample_name_counter.get(sample.name) > 1 or sample.name in old_sample_names
+ ]
+
+
+def validate_sample_names_different_from_case_names(
+ order: OrderWithCases, store: Store, **kwargs
+) -> list[SampleNameSameAsCaseNameError]:
+ """Return errors with the indexes of samples having the same name as any case in the order."""
+ errors: list[SampleNameSameAsCaseNameError] = []
+ new_case_names: set[str] = {case.name for _, case in order.enumerated_new_cases}
+ existing_case_names: set[str] = get_existing_case_names(order=order, status_db=store)
+ all_case_names = new_case_names.union(existing_case_names)
+ for case_index, sample_index, sample in order.enumerated_new_samples:
+ if sample.name in all_case_names:
+ error = SampleNameSameAsCaseNameError(
+ case_index=case_index,
+ sample_index=sample_index,
+ )
+ errors.append(error)
+ return errors
+
+
+def validate_fathers_are_male(order: OrderWithCases, **kwargs) -> list[InvalidFatherSexError]:
+ errors: list[InvalidFatherSexError] = []
+ for index, case in order.enumerated_new_cases:
+ case_errors: list[InvalidFatherSexError] = get_father_sex_errors(
+ case=case, case_index=index
+ )
+ errors.extend(case_errors)
+ return errors
+
+
+def validate_fathers_in_same_case_as_children(
+ order: OrderWithCases, **kwargs
+) -> list[FatherNotInCaseError]:
+ errors: list[FatherNotInCaseError] = []
+ for index, case in order.enumerated_new_cases:
+ case_errors: list[FatherNotInCaseError] = get_father_case_errors(
+ case=case,
+ case_index=index,
+ )
+ errors.extend(case_errors)
+ return errors
+
+
+def validate_mothers_are_female(order: OrderWithCases, **kwargs) -> list[InvalidMotherSexError]:
+ errors: list[InvalidMotherSexError] = []
+ for index, case in order.enumerated_new_cases:
+ case_errors: list[InvalidMotherSexError] = get_mother_sex_errors(
+ case=case,
+ case_index=index,
+ )
+ errors.extend(case_errors)
+ return errors
+
+
+def validate_mothers_in_same_case_as_children(
+ order: OrderWithCases, **kwargs
+) -> list[MotherNotInCaseError]:
+ errors: list[MotherNotInCaseError] = []
+ for index, case in order.enumerated_new_cases:
+ case_errors: list[MotherNotInCaseError] = get_mother_case_errors(
+ case=case,
+ case_index=index,
+ )
+ errors.extend(case_errors)
+ return errors
+
+
+def validate_pedigree(order: OrderWithCases, store: Store, **kwargs) -> list[PedigreeError]:
+ errors: list[PedigreeError] = []
+ for case_index, case in order.enumerated_new_cases:
+ case_errors: list[PedigreeError] = get_pedigree_errors(
+ case=case, case_index=case_index, store=store
+ )
+ errors.extend(case_errors)
+ return errors
+
+
+def validate_subject_sex_consistency(
+ order: OrderWithCases,
+ store: Store,
+) -> list[SexSubjectIdError]:
+ errors: list[SexSubjectIdError] = []
+
+ for case_index, sample_index, sample in order.enumerated_new_samples:
+ if not has_sex_and_subject(sample):
+ continue
+ if store.sample_exists_with_different_sex(
+ customer_internal_id=order.customer,
+ subject_id=sample.subject_id,
+ sex=sample.sex,
+ ):
+ error = SexSubjectIdError(
+ case_index=case_index,
+ sample_index=sample_index,
+ )
+ errors.append(error)
+ return errors
+
+
+def validate_subject_ids_different_from_case_names(
+ order: OrderWithCases, **kwargs
+) -> list[SubjectIdSameAsCaseNameError]:
+ errors: list[SubjectIdSameAsCaseNameError] = []
+ for index, case in order.enumerated_new_cases:
+ case_errors: list[SubjectIdSameAsCaseNameError] = validate_subject_ids_in_case(
+ case=case,
+ case_index=index,
+ )
+ errors.extend(case_errors)
+ return errors
+
+
+def validate_concentration_interval_if_skip_rc(
+ order: OrderWithCases, store: Store, **kwargs
+) -> list[InvalidConcentrationIfSkipRCError]:
+ if not order.skip_reception_control:
+ return []
+ errors: list[InvalidConcentrationIfSkipRCError] = []
+ for index, case in order.enumerated_new_cases:
+ case_errors: list[InvalidConcentrationIfSkipRCError] = validate_concentration_in_case(
+ case=case,
+ case_index=index,
+ store=store,
+ )
+ errors.extend(case_errors)
+ return errors
+
+
+def validate_well_position_format(order: OrderWithCases, **kwargs) -> list[WellFormatError]:
+ errors: list[WellFormatError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if is_invalid_plate_well_format(sample=sample):
+ error = WellFormatError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_tube_container_name_unique(
+ order: OrderWithCases, **kwargs
+) -> list[ContainerNameRepeatedError]:
+ errors: list[ContainerNameRepeatedError] = []
+
+ container_name_counter: Counter = get_counter_container_names(order)
+
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if is_sample_tube_name_reused(sample=sample, counter=container_name_counter):
+ error = ContainerNameRepeatedError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_not_all_samples_unknown_in_case(
+ order: OrderWithCases, **kwargs
+) -> list[StatusUnknownError]:
+ errors: list[StatusUnknownError] = []
+
+ for case_index, case in order.enumerated_new_cases:
+ if are_all_samples_unknown(case):
+ for sample_index, _ in case.enumerated_samples:
+ error = StatusUnknownError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_buffer_required(order: OrderWithCases, **kwargs) -> list[BufferMissingError]:
+ """Return an error for each new sample missing a buffer, if its application requires one."""
+
+ errors: list[BufferMissingError] = []
+ for case_index, sample_index, sample in order.enumerated_new_samples:
+ if is_buffer_missing(sample):
+ error = BufferMissingError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_capture_kit_panel_requirement(
+ order: BalsamicOrder | BalsamicUmiOrder, store: Store
+) -> list[CaptureKitMissingError]:
+ """
+ Return an error for each new sample missing a capture kit, if its application requires one.
+ Applicable to Balsamic and Balsamic-UMI orders only.
+ """
+ errors: list[CaptureKitMissingError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if is_sample_missing_capture_kit(sample=sample, store=store):
+ error = CaptureKitMissingError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_existing_samples_belong_to_collaboration(
+ order: OrderWithCases, store: Store, **kwargs
+) -> list[SampleOutsideOfCollaborationError]:
+ """Validates that existing samples belong to the same collaboration as the order's customer."""
+ errors: list[SampleOutsideOfCollaborationError] = []
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_existing_samples:
+ if is_sample_not_from_collaboration(
+ customer_id=order.customer, sample=sample, store=store
+ ):
+ error = SampleOutsideOfCollaborationError(
+ sample_index=sample_index, case_index=case_index
+ )
+ errors.append(error)
+ return errors
diff --git a/cg/services/orders/validation/rules/case_sample/utils.py b/cg/services/orders/validation/rules/case_sample/utils.py
new file mode 100644
index 0000000000..9db37b3c9e
--- /dev/null
+++ b/cg/services/orders/validation/rules/case_sample/utils.py
@@ -0,0 +1,317 @@
+import re
+from collections import Counter
+
+from cg.constants.constants import StatusOptions
+from cg.constants.sequencing import SeqLibraryPrepCategory
+from cg.constants.subject import Sex
+from cg.models.orders.sample_base import ContainerEnum, SexEnum
+from cg.services.orders.validation.errors.case_errors import RepeatedCaseNameError
+from cg.services.orders.validation.errors.case_sample_errors import (
+ FatherNotInCaseError,
+ InvalidConcentrationIfSkipRCError,
+ InvalidFatherSexError,
+ InvalidMotherSexError,
+ MotherNotInCaseError,
+ OccupiedWellError,
+ SubjectIdSameAsCaseNameError,
+)
+from cg.services.orders.validation.models.case import Case
+from cg.services.orders.validation.models.case_aliases import (
+ CaseContainingRelatives,
+ CaseWithSkipRC,
+)
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.models.sample_aliases import (
+ HumanSample,
+ SampleInCase,
+ SampleWithRelatives,
+)
+from cg.services.orders.validation.rules.utils import (
+ get_concentration_interval,
+ has_sample_invalid_concentration,
+ is_in_container,
+ is_sample_on_plate,
+ is_volume_within_allowed_interval,
+)
+from cg.services.orders.validation.workflows.balsamic.models.sample import BalsamicSample
+from cg.services.orders.validation.workflows.balsamic_umi.models.sample import BalsamicUmiSample
+from cg.store.models import Application, Customer
+from cg.store.models import Sample as DbSample
+from cg.store.store import Store
+
+
+def is_concentration_missing(sample: SampleWithRelatives) -> bool:
+ return not sample.concentration_ng_ul
+
+
+def is_well_position_missing(sample: SampleWithRelatives) -> bool:
+ return sample.container == ContainerEnum.plate and not sample.well_position
+
+
+def is_container_name_missing(sample: SampleWithRelatives) -> bool:
+ return sample.container == ContainerEnum.plate and not sample.container_name
+
+
+def get_invalid_panels(panels: list[str], store: Store) -> list[str]:
+ invalid_panels: list[str] = [
+ panel for panel in panels if not store.does_gene_panel_exist(panel)
+ ]
+ return invalid_panels
+
+
+def is_volume_invalid(sample: Sample) -> bool:
+ in_container: bool = is_in_container(sample.container)
+ allowed_volume: bool = is_volume_within_allowed_interval(sample.volume)
+ return in_container and not allowed_volume
+
+
+def get_well_sample_map(
+ order: OrderWithCases, **kwargs
+) -> dict[tuple[str, str], list[tuple[int, int]]]:
+ """
+ Construct a dict keyed on (container_name, well_position) pairs. For each such pair, the
+ value is a list of (case_index, sample_index) pairs identifying every plate sample with
+ that container name and well position.
+ """
+ well_position_to_sample_map = {}
+ for case_index, case in order.enumerated_new_cases:
+ for sample_index, sample in case.enumerated_new_samples:
+ if is_sample_on_plate(sample):
+ key: tuple[str, str] = (sample.container_name, sample.well_position)
+ value: tuple[int, int] = (case_index, sample_index)
+ if not well_position_to_sample_map.get(key):
+ well_position_to_sample_map[key] = []
+ well_position_to_sample_map[key].append(value)
+ return well_position_to_sample_map
+
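+# Illustrative return value (hypothetical wells): two samples loaded into well
+# "A:1" of plate "plate_1" produce {("plate_1", "A:1"): [(0, 0), (0, 1)]}; the
+# occupied-well rule then keeps the first (case_index, sample_index) entry and
+# reports an error for each remaining entry.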
+
+def get_occupied_well_errors(colliding_samples: list[tuple[int, int]]) -> list[OccupiedWellError]:
+ errors: list[OccupiedWellError] = []
+ for case_index, sample_index in colliding_samples:
+ error = OccupiedWellError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def get_indices_for_repeated_case_names(order: OrderWithCases) -> list[int]:
+ counter = Counter([case.name for _, case in order.enumerated_new_cases])
+ indices: list[int] = []
+
+ for index, case in order.enumerated_new_cases:
+ if counter.get(case.name) > 1:
+ indices.append(index)
+
+ return indices
+
+
+def get_repeated_case_name_errors(order: OrderWithCases) -> list[RepeatedCaseNameError]:
+ case_indices: list[int] = get_indices_for_repeated_case_names(order)
+ return [RepeatedCaseNameError(case_index=case_index) for case_index in case_indices]
+
+
+def get_father_sex_errors(
+ case: CaseContainingRelatives, case_index: int
+) -> list[InvalidFatherSexError]:
+ errors: list[InvalidFatherSexError] = []
+ children: list[tuple[SampleWithRelatives, int]] = case.get_samples_with_father()
+ for child, child_index in children:
+ if is_father_sex_invalid(child=child, case=case):
+ error: InvalidFatherSexError = create_father_sex_error(
+ case_index=case_index, sample_index=child_index
+ )
+ errors.append(error)
+ return errors
+
+
+def is_father_sex_invalid(child: SampleWithRelatives, case: CaseContainingRelatives) -> bool:
+ father: SampleWithRelatives | None = case.get_sample(child.father)
+ return father and father.sex != Sex.MALE
+
+
+def create_father_sex_error(case_index: int, sample_index: int) -> InvalidFatherSexError:
+ return InvalidFatherSexError(case_index=case_index, sample_index=sample_index)
+
+
+def get_father_case_errors(
+ case: CaseContainingRelatives,
+ case_index: int,
+) -> list[FatherNotInCaseError]:
+ errors: list[FatherNotInCaseError] = []
+ children: list[tuple[SampleWithRelatives, int]] = case.get_samples_with_father()
+ for child, child_index in children:
+ father: SampleWithRelatives | None = case.get_sample(child.father)
+ if not father:
+ error: FatherNotInCaseError = create_father_case_error(
+ case_index=case_index,
+ sample_index=child_index,
+ )
+ errors.append(error)
+ return errors
+
+
+def get_mother_sex_errors(
+ case: CaseContainingRelatives,
+ case_index: int,
+) -> list[InvalidMotherSexError]:
+ errors: list[InvalidMotherSexError] = []
+ children: list[tuple[SampleWithRelatives, int]] = case.get_samples_with_mother()
+ for child, child_index in children:
+ if is_mother_sex_invalid(child=child, case=case):
+ error: InvalidMotherSexError = create_mother_sex_error(
+ case_index=case_index,
+ sample_index=child_index,
+ )
+ errors.append(error)
+ return errors
+
+
+def get_mother_case_errors(
+ case: CaseContainingRelatives,
+ case_index: int,
+) -> list[MotherNotInCaseError]:
+ errors: list[MotherNotInCaseError] = []
+ children: list[tuple[SampleWithRelatives, int]] = case.get_samples_with_mother()
+ for child, child_index in children:
+ mother: SampleWithRelatives | None = case.get_sample(child.mother)
+ if not mother:
+ error: MotherNotInCaseError = create_mother_case_error(
+ case_index=case_index, sample_index=child_index
+ )
+ errors.append(error)
+ return errors
+
+
+def create_father_case_error(case_index: int, sample_index: int) -> FatherNotInCaseError:
+ return FatherNotInCaseError(case_index=case_index, sample_index=sample_index)
+
+
+def create_mother_case_error(case_index: int, sample_index: int) -> MotherNotInCaseError:
+ return MotherNotInCaseError(case_index=case_index, sample_index=sample_index)
+
+
+def is_mother_sex_invalid(child: SampleWithRelatives, case: CaseContainingRelatives) -> bool:
+ mother: SampleWithRelatives | None = case.get_sample(child.mother)
+ return mother and mother.sex != Sex.FEMALE
+
+
+def create_mother_sex_error(case_index: int, sample_index: int) -> InvalidMotherSexError:
+ return InvalidMotherSexError(case_index=case_index, sample_index=sample_index)
+
+
+def has_sex_and_subject(sample: HumanSample) -> bool:
+ return bool(sample.subject_id and sample.sex != SexEnum.unknown)
+
+
+def validate_subject_ids_in_case(
+ case: CaseContainingRelatives, case_index: int
+) -> list[SubjectIdSameAsCaseNameError]:
+ errors: list[SubjectIdSameAsCaseNameError] = []
+ for sample_index, sample in case.enumerated_new_samples:
+ if sample.subject_id == case.name:
+ error = SubjectIdSameAsCaseNameError(case_index=case_index, sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_concentration_in_case(
+ case: CaseWithSkipRC, case_index: int, store: Store
+) -> list[InvalidConcentrationIfSkipRCError]:
+ errors: list[InvalidConcentrationIfSkipRCError] = []
+ for sample_index, sample in case.enumerated_new_samples:
+ if application := store.get_application_by_tag(sample.application):
+ allowed_interval = get_concentration_interval(sample=sample, application=application)
+ if has_sample_invalid_concentration(sample=sample, allowed_interval=allowed_interval):
+ error: InvalidConcentrationIfSkipRCError = create_invalid_concentration_error(
+ case_index=case_index,
+ sample_index=sample_index,
+ allowed_interval=allowed_interval,
+ )
+ errors.append(error)
+ return errors
+
+
+def create_invalid_concentration_error(
+ case_index: int, sample_index: int, allowed_interval: tuple[float, float]
+) -> InvalidConcentrationIfSkipRCError:
+ return InvalidConcentrationIfSkipRCError(
+ case_index=case_index,
+ sample_index=sample_index,
+ allowed_interval=allowed_interval,
+ )
+
+
+def is_invalid_plate_well_format(sample: Sample) -> bool:
+ """Check if a sample has an invalid well format."""
+ correct_well_position_pattern: str = r"^[A-H]:([1-9]|1[0-2])$"
+ if sample.is_on_plate:
+ return not bool(re.match(correct_well_position_pattern, sample.well_position))
+ return False
+
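+# A sketch of what the pattern accepts (examples are illustrative): a row letter
+# A-H, a colon, and a column 1-12 with no leading zero. "A:1" and "H:12" pass,
+# while "A1", "I:1", "A:0" and "A:13" are rejected.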
+
+def is_sample_tube_name_reused(sample: Sample, counter: Counter) -> bool:
+ """Check if a tube container name is reused across samples."""
+ return sample.container == ContainerEnum.tube and counter.get(sample.container_name) > 1
+
+
+def get_counter_container_names(order: OrderWithCases) -> Counter:
+ counter = Counter(
+ sample.container_name
+ for case_index, case in order.enumerated_new_cases
+ for sample_index, sample in case.enumerated_new_samples
+ )
+ return counter
+
+
+def get_existing_sample_names(order: OrderWithCases, status_db: Store) -> set[str]:
+ existing_sample_names: set[str] = set()
+ for case in order.cases:
+ if case.is_new:
+ for sample_index, sample in case.enumerated_existing_samples:
+ db_sample = status_db.get_sample_by_internal_id(sample.internal_id)
+ existing_sample_names.add(db_sample.name)
+ else:
+ db_case = status_db.get_case_by_internal_id(case.internal_id)
+ for sample in db_case.samples:
+ existing_sample_names.add(sample.name)
+ return existing_sample_names
+
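+# Note: the helper above collects names from two sources: existing samples
+# attached to new cases, and every sample of cases already present in the store.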
+
+def are_all_samples_unknown(case: Case) -> bool:
+ """Check if all samples in a case are unknown."""
+ return all(sample.status == StatusOptions.UNKNOWN for sample in case.samples)
+
+
+def is_buffer_missing(sample: SampleInCase) -> bool:
+ applications_requiring_buffer: tuple[str, ...] = ("PAN", "EX", "WGSWPF", "METWPF")
+ return bool(
+ sample.application.startswith(applications_requiring_buffer)
+ and not sample.elution_buffer
+ )
+
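+# Illustrative prefix match (the tag below is hypothetical): an application tag
+# such as "PANKTTR100" starts with "PAN", so a sample carrying it without an
+# elution buffer would be flagged by is_buffer_missing.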
+
+def is_sample_missing_capture_kit(sample: BalsamicSample | BalsamicUmiSample, store: Store) -> bool:
+ """Returns whether a TGS sample has an application and is missing a capture kit."""
+ application: Application | None = store.get_application_by_tag(sample.application)
+ return (
+ application
+ and application.prep_category == SeqLibraryPrepCategory.TARGETED_GENOME_SEQUENCING
+ and not sample.capture_kit
+ )
+
+
+def is_sample_not_from_collaboration(
+ customer_id: str, sample: ExistingSample, store: Store
+) -> bool:
+ db_sample: DbSample | None = store.get_sample_by_internal_id(sample.internal_id)
+ customer: Customer | None = store.get_customer_by_internal_id(customer_id)
+ return db_sample and customer and db_sample.customer not in customer.collaborators
+
+
+def get_existing_case_names(order: OrderWithCases, status_db: Store) -> set[str]:
+ existing_case_names: set[str] = set()
+ for _, case in order.enumerated_existing_cases:
+ if db_case := status_db.get_case_by_internal_id(case.internal_id):
+ existing_case_names.add(db_case.name)
+ return existing_case_names
diff --git a/cg/services/orders/validation/rules/order/__init__.py b/cg/services/orders/validation/rules/order/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/validation/rules/order/rules.py b/cg/services/orders/validation/rules/order/rules.py
new file mode 100644
index 0000000000..e7bd52b3f3
--- /dev/null
+++ b/cg/services/orders/validation/rules/order/rules.py
@@ -0,0 +1,47 @@
+from cg.services.orders.validation.errors.order_errors import (
+ CustomerCannotSkipReceptionControlError,
+ CustomerDoesNotExistError,
+ UserNotAssociatedWithCustomerError,
+)
+from cg.services.orders.validation.models.order import Order
+from cg.store.store import Store
+
+
+def validate_customer_exists(
+ order: Order,
+ store: Store,
+ **kwargs,
+) -> list[CustomerDoesNotExistError]:
+ errors: list[CustomerDoesNotExistError] = []
+ if not store.customer_exists(order.customer):
+ error = CustomerDoesNotExistError()
+ errors.append(error)
+ return errors
+
+
+def validate_user_belongs_to_customer(
+ order: Order, store: Store, **kwargs
+) -> list[UserNotAssociatedWithCustomerError]:
+ has_access: bool = store.is_user_associated_with_customer(
+ user_id=order._user_id,
+ customer_internal_id=order.customer,
+ )
+
+ errors: list[UserNotAssociatedWithCustomerError] = []
+ if not has_access:
+ error = UserNotAssociatedWithCustomerError()
+ errors.append(error)
+ return errors
+
+
+def validate_customer_can_skip_reception_control(
+ order: Order,
+ store: Store,
+ **kwargs,
+) -> list[CustomerCannotSkipReceptionControlError]:
+ errors: list[CustomerCannotSkipReceptionControlError] = []
+
+ if order.skip_reception_control and not store.is_customer_trusted(order.customer):
+ error = CustomerCannotSkipReceptionControlError()
+ errors.append(error)
+ return errors
diff --git a/cg/services/orders/validation/rules/sample/__init__.py b/cg/services/orders/validation/rules/sample/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/validation/rules/sample/rules.py b/cg/services/orders/validation/rules/sample/rules.py
new file mode 100644
index 0000000000..bf6a58f255
--- /dev/null
+++ b/cg/services/orders/validation/rules/sample/rules.py
@@ -0,0 +1,389 @@
+from cg.models.orders.constants import OrderType
+from cg.services.orders.validation.errors.sample_errors import (
+ ApplicationArchivedError,
+ ApplicationNotCompatibleError,
+ ApplicationNotValidError,
+ BufferInvalidError,
+ ConcentrationInvalidIfSkipRCError,
+ ConcentrationRequiredError,
+ ContainerNameMissingError,
+ ContainerNameRepeatedError,
+ IndexNumberMissingError,
+ IndexNumberOutOfRangeError,
+ IndexSequenceMismatchError,
+ IndexSequenceMissingError,
+ InvalidVolumeError,
+ OccupiedWellError,
+ PoolApplicationError,
+ PoolPriorityError,
+ SampleNameNotAvailableControlError,
+ SampleNameNotAvailableError,
+ SampleNameRepeatedError,
+ VolumeRequiredError,
+ WellFormatError,
+ WellFormatRmlError,
+ WellPositionMissingError,
+ WellPositionRmlMissingError,
+)
+from cg.services.orders.validation.models.order_aliases import (
+ OrderWithControlSamples,
+ OrderWithIndexedSamples,
+)
+from cg.services.orders.validation.models.sample_aliases import IndexedSample
+from cg.services.orders.validation.rules.sample.utils import (
+ PlateSamplesValidator,
+ get_indices_for_repeated_sample_names,
+ get_indices_for_tube_repeated_container_name,
+ get_sample_name_not_available_errors,
+ has_multiple_applications,
+ has_multiple_priorities,
+ is_container_name_missing,
+ is_index_number_missing,
+ is_index_number_out_of_range,
+ is_index_sequence_mismatched,
+ is_index_sequence_missing,
+ is_invalid_well_format,
+ is_invalid_well_format_rml,
+ validate_buffers_are_allowed,
+ validate_concentration_interval,
+ validate_concentration_required,
+)
+from cg.services.orders.validation.rules.utils import (
+ is_application_compatible,
+ is_volume_invalid,
+ is_volume_missing,
+)
+from cg.services.orders.validation.workflows.fastq.models.order import FastqOrder
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.store.store import Store
+
+
+def validate_application_compatibility(
+ order: OrderWithSamples,
+ store: Store,
+ **kwargs,
+) -> list[ApplicationNotCompatibleError]:
+ """
+ Validate that the applications of all samples in the order are compatible with the order type.
+ Applicable to all order types.
+ """
+ errors: list[ApplicationNotCompatibleError] = []
+ order_type: OrderType = order.order_type
+ for sample_index, sample in order.enumerated_samples:
+ compatible: bool = is_application_compatible(
+ order_type=order_type,
+ application_tag=sample.application,
+ store=store,
+ )
+ if not compatible:
+ error = ApplicationNotCompatibleError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_application_exists(
+ order: OrderWithSamples, store: Store, **kwargs
+) -> list[ApplicationNotValidError]:
+ """
+ Validate that the applications of all samples in the order exist in the database.
+ Applicable to all order types.
+ """
+ errors: list[ApplicationNotValidError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if not store.get_application_by_tag(sample.application):
+ error = ApplicationNotValidError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_applications_not_archived(
+ order: OrderWithSamples, store: Store, **kwargs
+) -> list[ApplicationArchivedError]:
+ """
+ Validate that none of the applications of the samples in the order are archived.
+ Applicable to all order types.
+ """
+ errors: list[ApplicationArchivedError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if store.is_application_archived(sample.application):
+ error = ApplicationArchivedError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_buffer_skip_rc_condition(order: FastqOrder, **kwargs) -> list[BufferInvalidError]:
+ """
+ Validate that the sample buffers allow skipping reception control if that option is true.
+ Only applicable to order types that have targeted sequencing applications (TGS).
+ """
+ errors: list[BufferInvalidError] = []
+ if order.skip_reception_control:
+ errors.extend(validate_buffers_are_allowed(order))
+ return errors
+
+
+def validate_concentration_interval_if_skip_rc(
+ order: FastqOrder, store: Store, **kwargs
+) -> list[ConcentrationInvalidIfSkipRCError]:
+ """
+ Validate that all samples have an allowed concentration if the order skips reception control.
+ Only applicable to order types that have targeted sequencing applications (TGS).
+ """
+ errors: list[ConcentrationInvalidIfSkipRCError] = []
+ if order.skip_reception_control:
+ errors.extend(validate_concentration_interval(order=order, store=store))
+ return errors
+
+
+def validate_container_name_required(
+ order: OrderWithSamples, **kwargs
+) -> list[ContainerNameMissingError]:
+ """
+ Validate that the container names are present for all samples sent on plates.
+ Applicable to all order types.
+ """
+ errors: list[ContainerNameMissingError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if is_container_name_missing(sample=sample):
+ error = ContainerNameMissingError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_concentration_required_if_skip_rc(
+ order: FastqOrder, **kwargs
+) -> list[ConcentrationRequiredError]:
+ """
+ Validate that all samples have a concentration if the order skips reception control.
+ Only applicable to order types that have targeted sequencing applications (TGS).
+ """
+ errors: list[ConcentrationRequiredError] = []
+ if order.skip_reception_control:
+ errors.extend(validate_concentration_required(order))
+ return errors
+
+
+def validate_index_number_in_range(
+ order: OrderWithIndexedSamples, **kwargs
+) -> list[IndexNumberOutOfRangeError]:
+ errors: list[IndexNumberOutOfRangeError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if is_index_number_out_of_range(sample):
+ error = IndexNumberOutOfRangeError(sample_index=sample_index, index=sample.index)
+ errors.append(error)
+ return errors
+
+
+def validate_index_number_required(
+ order: OrderWithIndexedSamples, **kwargs
+) -> list[IndexNumberMissingError]:
+ errors: list[IndexNumberMissingError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if is_index_number_missing(sample):
+ error = IndexNumberMissingError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_index_sequence_mismatch(
+ order: OrderWithIndexedSamples, **kwargs
+) -> list[IndexSequenceMismatchError]:
+ errors: list[IndexSequenceMismatchError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if is_index_sequence_mismatched(sample):
+ error = IndexSequenceMismatchError(
+ sample_index=sample_index, index=sample.index, index_number=sample.index_number
+ )
+ errors.append(error)
+ return errors
+
+
+def validate_index_sequence_required(
+ order: OrderWithIndexedSamples, **kwargs
+) -> list[IndexSequenceMissingError]:
+ errors: list[IndexSequenceMissingError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if is_index_sequence_missing(sample):
+ error = IndexSequenceMissingError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_pools_contain_one_application(
+ order: OrderWithIndexedSamples, **kwargs
+) -> list[PoolApplicationError]:
+ """
+ Validate that the pools in the order contain only samples with the same application.
+ Only applicable to order types with indexed samples (RML and Fluffy).
+ """
+ errors: list[PoolApplicationError] = []
+ for pool, enumerated_samples in order.enumerated_pools.items():
+ samples: list[IndexedSample] = [sample for _, sample in enumerated_samples]
+ if has_multiple_applications(samples):
+ for sample_index, _ in enumerated_samples:
+ error = PoolApplicationError(sample_index=sample_index, pool_name=pool)
+ errors.append(error)
+ return errors
+
+
+def validate_pools_contain_one_priority(
+ order: OrderWithIndexedSamples, **kwargs
+) -> list[PoolPriorityError]:
+ """
+ Validate that the pools in the order contain only samples with the same priority.
+ Only applicable to order types with indexed samples (RML and Fluffy).
+ """
+ errors: list[PoolPriorityError] = []
+ for pool, enumerated_samples in order.enumerated_pools.items():
+ samples: list[IndexedSample] = [sample for _, sample in enumerated_samples]
+ if has_multiple_priorities(samples):
+ for sample_index, _ in enumerated_samples:
+ error = PoolPriorityError(sample_index=sample_index, pool_name=pool)
+ errors.append(error)
+ return errors
+
+
+def validate_sample_names_available(
+ order: OrderWithSamples, store: Store, **kwargs
+) -> list[SampleNameNotAvailableError]:
+ """
+ Validate that the sample names do not exist in the database under the same customer.
+ Applicable to all orders without control samples.
+ """
+ errors: list[SampleNameNotAvailableError] = get_sample_name_not_available_errors(
+ order=order, store=store, has_order_control=False
+ )
+ return errors
+
+
+def validate_non_control_sample_names_available(
+ order: OrderWithControlSamples, store: Store, **kwargs
+) -> list[SampleNameNotAvailableControlError]:
+ """
+ Validate that non-control sample names do not exist in the database under the same customer.
+ Applicable to all orders with control samples.
+ """
+ errors: list[SampleNameNotAvailableControlError] = get_sample_name_not_available_errors(
+ order=order, store=store, has_order_control=True
+ )
+ return errors
+
+
+def validate_sample_names_unique(
+ order: OrderWithSamples, **kwargs
+) -> list[SampleNameRepeatedError]:
+ """
+ Validate that all the sample names are unique within the order.
+ Applicable to all order types.
+ """
+ sample_indices: list[int] = get_indices_for_repeated_sample_names(order)
+ return [SampleNameRepeatedError(sample_index=sample_index) for sample_index in sample_indices]
+
+
+def validate_tube_container_name_unique(
+ order: OrderWithSamples,
+ **kwargs,
+) -> list[ContainerNameRepeatedError]:
+ """
+ Validate that the container names are unique for tube samples within the order.
+ Applicable to all order types.
+ """
+ errors: list[ContainerNameRepeatedError] = []
+ repeated_container_name_indices: list = get_indices_for_tube_repeated_container_name(order)
+ for sample_index in repeated_container_name_indices:
+ error = ContainerNameRepeatedError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_volume_interval(order: OrderWithSamples, **kwargs) -> list[InvalidVolumeError]:
+ """
+ Validate that the volume of all samples is within the allowed interval.
+ Applicable to all order types.
+ """
+ errors: list[InvalidVolumeError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if is_volume_invalid(sample):
+ error = InvalidVolumeError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_volume_required(order: OrderWithSamples, **kwargs) -> list[VolumeRequiredError]:
+ """
+ Validate that all samples have a volume if they are in a container.
+ Applicable to all order types.
+ """
+ errors: list[VolumeRequiredError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if is_volume_missing(sample):
+ error = VolumeRequiredError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_wells_contain_at_most_one_sample(
+ order: OrderWithSamples,
+ **kwargs,
+) -> list[OccupiedWellError]:
+ """
+ Validate that the wells in the order contain at most one sample.
+ Applicable to all order types with non-indexed samples.
+ """
+ plate_samples = PlateSamplesValidator(order)
+ return plate_samples.get_occupied_well_errors()
+
+
+def validate_well_position_format(order: OrderWithSamples, **kwargs) -> list[WellFormatError]:
+ """
+ Validate that the well positions of all samples sent in plates have the correct format.
+ Applicable to all order types with non-indexed samples.
+ """
+ errors: list[WellFormatError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if is_invalid_well_format(sample=sample):
+ error = WellFormatError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_well_position_rml_format(
+ order: OrderWithIndexedSamples, **kwargs
+) -> list[WellFormatRmlError]:
+ """
+ Validate that the well positions of all indexed samples have the correct format.
+ Applicable to all order types with indexed samples.
+ """
+ errors: list[WellFormatRmlError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if sample.well_position_rml and is_invalid_well_format_rml(sample=sample):
+ error = WellFormatRmlError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def validate_well_positions_required(
+ order: OrderWithSamples,
+ **kwargs,
+) -> list[WellPositionMissingError]:
+ """
+ Validate that all samples sent in plates have well positions.
+ Applicable to all order types with non-indexed samples.
+ """
+ plate_samples = PlateSamplesValidator(order)
+ return plate_samples.get_well_position_missing_errors()
+
+
+def validate_well_positions_required_rml(
+ order: OrderWithIndexedSamples, **kwargs
+) -> list[WellPositionRmlMissingError]:
+ """
+ Validate that all indexed samples have well positions.
+ Applicable to all order types with indexed samples.
+ """
+ errors: list[WellPositionRmlMissingError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if sample.is_on_plate and not sample.well_position_rml:
+ error = WellPositionRmlMissingError(sample_index=sample_index)
+ errors.append(error)
+ return errors
diff --git a/cg/services/orders/validation/rules/sample/utils.py b/cg/services/orders/validation/rules/sample/utils.py
new file mode 100644
index 0000000000..15bf38f765
--- /dev/null
+++ b/cg/services/orders/validation/rules/sample/utils.py
@@ -0,0 +1,254 @@
+import re
+from collections import Counter
+
+from cg.models.orders.sample_base import ContainerEnum, ControlEnum
+from cg.services.orders.validation.constants import ALLOWED_SKIP_RC_BUFFERS, IndexEnum
+from cg.services.orders.validation.errors.sample_errors import (
+ BufferInvalidError,
+ ConcentrationInvalidIfSkipRCError,
+ ConcentrationRequiredError,
+ OccupiedWellError,
+ SampleError,
+ SampleNameNotAvailableControlError,
+ SampleNameNotAvailableError,
+ WellPositionMissingError,
+)
+from cg.services.orders.validation.index_sequences import INDEX_SEQUENCES
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.models.sample_aliases import IndexedSample
+from cg.services.orders.validation.rules.utils import (
+ get_application_concentration_interval,
+ get_concentration_interval,
+ has_sample_invalid_concentration,
+ is_sample_cfdna,
+)
+from cg.services.orders.validation.workflows.fastq.models.order import FastqOrder
+from cg.services.orders.validation.workflows.fastq.models.sample import FastqSample
+from cg.store.models import Application
+from cg.store.store import Store
+
+
+class PlateSamplesValidator:
+
+ def __init__(self, order: OrderWithSamples):
+ self.wells: dict[tuple[str, str], list[int]] = {}
+ self.plate_samples: list[tuple[int, Sample]] = []
+ self._initialize_wells(order)
+
+ def _initialize_wells(self, order: OrderWithSamples):
+ """
+ Construct a dict with keys being a (container_name, well_position) pair.
+ The value will be a list of sample indices for samples located in the well.
+ """
+ for sample_index, sample in order.enumerated_samples:
+ if sample.is_on_plate:
+ self.plate_samples.append((sample_index, sample))
+ key: tuple[str, str] = (sample.container_name, sample.well_position)
+ if not self.wells.get(key):
+ self.wells[key] = []
+ self.wells[key].append(sample_index)
+
+ def get_occupied_well_errors(self) -> list[OccupiedWellError]:
+ """Get errors for samples assigned to wells that are already occupied."""
+ conflicting_samples: list[int] = []
+ for samples_indices in self.wells.values():
+ if len(samples_indices) > 1:
+ conflicting_samples.extend(samples_indices[1:])
+ return get_occupied_well_errors(conflicting_samples)
+
+ def get_well_position_missing_errors(self) -> list[WellPositionMissingError]:
+ """Get errors for samples missing well positions."""
+ samples_missing_wells: list[int] = []
+ for sample_index, sample in self.plate_samples:
+ if not sample.well_position:
+ samples_missing_wells.append(sample_index)
+ return get_missing_well_errors(samples_missing_wells)
+
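+# Semantics note (indices illustrative): for a well whose samples_indices list
+# is [2, 5, 7], only samples 5 and 7 receive an OccupiedWellError; the sample
+# that appears first in the order keeps the well.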
+
+def get_occupied_well_errors(sample_indices: list[int]) -> list[OccupiedWellError]:
+ return [OccupiedWellError(sample_index=sample_index) for sample_index in sample_indices]
+
+
+def get_missing_well_errors(sample_indices: list[int]) -> list[WellPositionMissingError]:
+ return [WellPositionMissingError(sample_index=sample_index) for sample_index in sample_indices]
+
+
+def get_indices_for_repeated_sample_names(order: OrderWithSamples) -> list[int]:
+ counter = Counter([sample.name for sample in order.samples])
+ indices: list[int] = []
+ for index, sample in order.enumerated_samples:
+ if counter.get(sample.name) > 1:
+ indices.append(index)
+ return indices
+
+
+def get_sample_name_not_available_errors(
+ order: OrderWithSamples, store: Store, has_order_control: bool
+) -> list[SampleError]:
+ """Return errors for non-control samples with names already used in the database."""
+ errors: list[SampleError] = []
+ customer = store.get_customer_by_internal_id(order.customer)
+ for sample_index, sample in order.enumerated_samples:
+ if store.get_sample_by_customer_and_name(
+ sample_name=sample.name, customer_entry_id=[customer.id]
+ ):
+ if is_sample_name_allowed_to_be_repeated(has_control=has_order_control, sample=sample):
+ continue
+ error = get_appropriate_sample_name_available_error(
+ has_control=has_order_control, sample_index=sample_index
+ )
+ errors.append(error)
+ return errors
+
+
+def is_sample_name_allowed_to_be_repeated(has_control: bool, sample: Sample) -> bool:
+ """
+ Return whether a sample name can be used if it is already in the database.
+ This is the case when the order has control samples and the sample is a control.
+ """
+ return has_control and sample.control in [ControlEnum.positive, ControlEnum.negative]
+
+
+def get_appropriate_sample_name_available_error(
+ has_control: bool, sample_index: int
+) -> SampleError:
+ """
+ Return the appropriate error for a sample name that is not available based on whether the
+ order has control samples or not.
+ """
+ if has_control:
+ return SampleNameNotAvailableControlError(sample_index=sample_index)
+ return SampleNameNotAvailableError(sample_index=sample_index)
+
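+# Hedged example (sample name hypothetical): in an order with controls, a
+# negative control re-using the taken name "NTC-water" is skipped by
+# is_sample_name_allowed_to_be_repeated, while a non-control sample with the
+# same name would still produce a SampleNameNotAvailableControlError.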
+
+def is_tube_container_name_redundant(sample: Sample, counter: Counter) -> bool:
+ return sample.container == ContainerEnum.tube and counter.get(sample.container_name) > 1
+
+
+def get_indices_for_tube_repeated_container_name(order: OrderWithSamples) -> list[int]:
+ counter = Counter([sample.container_name for sample in order.samples])
+ indices: list[int] = []
+ for index, sample in order.enumerated_samples:
+ if is_tube_container_name_redundant(sample, counter):
+ indices.append(index)
+ return indices
+
+
+def is_invalid_well_format(sample: Sample) -> bool:
+ """Check if a sample has an invalid well format."""
+ correct_well_position_pattern: str = r"^[A-H]:([1-9]|1[0-2])$"
+ if sample.is_on_plate:
+ return not bool(re.match(correct_well_position_pattern, sample.well_position))
+ return False
+
+
+def is_invalid_well_format_rml(sample: IndexedSample) -> bool:
+ """Check if an indexed sample has an invalid well format."""
+ correct_well_position_pattern: str = r"^[A-H]:([1-9]|1[0-2])$"
+ return not bool(re.match(correct_well_position_pattern, sample.well_position_rml))
+
+
+def is_container_name_missing(sample: Sample) -> bool:
+ """Checks if a sample is missing its container name."""
+ if sample.is_on_plate and not sample.container_name:
+ return True
+ return False
+
+
+def create_invalid_concentration_error(
+ sample: FastqSample, sample_index: int, store: Store
+) -> ConcentrationInvalidIfSkipRCError:
+ application: Application = store.get_application_by_tag(sample.application)
+ is_cfdna: bool = is_sample_cfdna(sample)
+ allowed_interval: tuple[float, float] = get_application_concentration_interval(
+ application=application,
+ is_cfdna=is_cfdna,
+ )
+ return ConcentrationInvalidIfSkipRCError(
+ sample_index=sample_index,
+ allowed_interval=allowed_interval,
+ )
+
+
+def validate_concentration_interval(
+ order: FastqOrder, store: Store
+) -> list[ConcentrationInvalidIfSkipRCError]:
+ errors: list[ConcentrationInvalidIfSkipRCError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if application := store.get_application_by_tag(sample.application):
+ allowed_interval: tuple[float, float] = get_concentration_interval(
+ sample=sample, application=application
+ )
+ if allowed_interval and has_sample_invalid_concentration(
+ sample=sample, allowed_interval=allowed_interval
+ ):
+ error: ConcentrationInvalidIfSkipRCError = create_invalid_concentration_error(
+ sample=sample,
+ sample_index=sample_index,
+ store=store,
+ )
+ errors.append(error)
+ return errors
+
+
+def validate_concentration_required(order: FastqOrder) -> list[ConcentrationRequiredError]:
+ errors: list[ConcentrationRequiredError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if not sample.concentration_ng_ul:
+ error = ConcentrationRequiredError(sample_index=sample_index)
+ errors.append(error)
+ return errors
+
+
+def has_multiple_applications(samples: list[IndexedSample]) -> bool:
+ return len({sample.application for sample in samples}) > 1
+
+
+def has_multiple_priorities(samples: list[IndexedSample]) -> bool:
+ return len({sample.priority for sample in samples}) > 1
+
+
+def is_index_number_missing(sample: IndexedSample) -> bool:
+ """Checks if a sample is missing its index number.
+ Note: Index is an attribute on the sample, not its position in the list of samples."""
+ return sample.index != IndexEnum.NO_INDEX and not sample.index_number
+
+
+def is_index_number_out_of_range(sample: IndexedSample) -> bool:
+ """Validates that the sample's index number is in range for its specified index.
+ Note: Index number is an attribute on the sample, not its position in the list of samples."""
+ return sample.index_number and not (
+ 1 <= sample.index_number <= len(INDEX_SEQUENCES[sample.index])
+ )
+
+
+def is_index_sequence_missing(sample: IndexedSample) -> bool:
+ """Checks if a sample is missing its index number.
+ Note: Index sequence is an attribute on the sample, not its position in the list of samples."""
+ return sample.index != IndexEnum.NO_INDEX and not sample.index_sequence
+
+
+def is_index_sequence_mismatched(sample: IndexedSample) -> bool:
+ """Validates if the sample's index sequence matches the given index and index number.
+ The index numbers start at 1, creating an offset."""
+ return (
+ sample.index_sequence
+ and sample.index != IndexEnum.NO_INDEX
+ and not is_index_number_out_of_range(sample)
+ and INDEX_SEQUENCES[sample.index][sample.index_number - 1] != sample.index_sequence
+ )
+
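+# Worked example of the 1-based offset (values illustrative): index_number=1 is
+# compared against INDEX_SEQUENCES[sample.index][0], so a mismatch is only
+# reported when the submitted index_sequence differs from the sequence
+# registered at position index_number - 1.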
+
+def validate_buffers_are_allowed(order: FastqOrder) -> list[BufferInvalidError]:
+ """
+ Validate that the order only contains samples with buffers that allow skipping reception
+ control. Reception control can only be skipped if there is no need to exchange the buffer,
+ i.e. if the sample buffer is nuclease-free water or Tris-HCl.
+ """
+ errors: list[BufferInvalidError] = []
+ for sample_index, sample in order.enumerated_samples:
+ if sample.elution_buffer not in ALLOWED_SKIP_RC_BUFFERS:
+ error = BufferInvalidError(sample_index=sample_index)
+ errors.append(error)
+ return errors
diff --git a/cg/services/orders/validation/rules/utils.py b/cg/services/orders/validation/rules/utils.py
new file mode 100644
index 0000000000..1736ecae22
--- /dev/null
+++ b/cg/services/orders/validation/rules/utils.py
@@ -0,0 +1,83 @@
+from cg.constants.sample_sources import SourceType
+from cg.models.orders.constants import OrderType
+from cg.models.orders.sample_base import ContainerEnum
+from cg.services.orders.validation.constants import MAXIMUM_VOLUME, MINIMUM_VOLUME
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.models.sample_aliases import SampleWithSkipRC
+from cg.store.models import Application
+from cg.store.store import Store
+
+
+def is_volume_invalid(sample: Sample) -> bool:
+ in_container: bool = is_in_container(sample.container)
+ allowed_volume: bool = is_volume_within_allowed_interval(sample.volume)
+ return in_container and not allowed_volume
+
+
+def is_in_container(container: ContainerEnum) -> bool:
+ return container != ContainerEnum.no_container
+
+
+def is_volume_within_allowed_interval(volume: int) -> bool:
+ return volume and (MINIMUM_VOLUME <= volume <= MAXIMUM_VOLUME)
+
+
+def is_sample_on_plate(sample: Sample) -> bool:
+ return sample.container == ContainerEnum.plate
+
+
+def is_application_compatible(
+ order_type: OrderType,
+ application_tag: str,
+ store: Store,
+) -> bool:
+ application: Application | None = store.get_application_by_tag(application_tag)
+ return not application or order_type in application.order_types
+
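+# Design note: a missing application makes this helper return True, presumably
+# so the compatibility rule does not double-report; unknown application tags are
+# flagged separately by validate_application_exists.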
+
+def is_volume_missing(sample: Sample) -> bool:
+ """Check if a sample is missing its volume."""
+ if is_in_container(sample.container) and not sample.volume:
+ return True
+ return False
+
+
+def has_sample_invalid_concentration(
+ sample: SampleWithSkipRC, allowed_interval: tuple[float, float]
+) -> bool:
+ concentration: float | None = sample.concentration_ng_ul
+ return bool(
+ concentration
+ and not is_sample_concentration_within_interval(
+ concentration=concentration, interval=allowed_interval
+ )
+ )
+
+
+def get_concentration_interval(
+ sample: SampleWithSkipRC, application: Application
+) -> tuple[float, float] | None:
+ is_cfdna: bool = is_sample_cfdna(sample)
+ allowed_interval: tuple[float, float] = get_application_concentration_interval(
+ application=application, is_cfdna=is_cfdna
+ )
+ return allowed_interval
+
+
+def is_sample_cfdna(sample: SampleWithSkipRC) -> bool:
+ source = sample.source
+ return source == SourceType.CELL_FREE_DNA
+
+
+def get_application_concentration_interval(
+ application: Application, is_cfdna: bool
+) -> tuple[float, float]:
+ if is_cfdna:
+ return (
+ application.sample_concentration_minimum_cfdna,
+ application.sample_concentration_maximum_cfdna,
+ )
+ return application.sample_concentration_minimum, application.sample_concentration_maximum
+
+
+def is_sample_concentration_within_interval(
+ concentration: float, interval: tuple[float, float]
+) -> bool:
+ return interval[0] <= concentration <= interval[1]
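
Taken together, the concentration helpers compose as below; `application` and `sample` are assumed to come from the store and a parsed order respectively:

    # Sketch: pick the cfDNA or standard interval from the application,
    # then flag the sample if its concentration falls outside it.
    interval: tuple[float, float] = get_concentration_interval(sample=sample, application=application)
    if has_sample_invalid_concentration(sample=sample, allowed_interval=interval):
        print(f"Concentration outside {interval[0]}-{interval[1]} ng/uL")
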
diff --git a/cg/services/orders/validation/service.py b/cg/services/orders/validation/service.py
new file mode 100644
index 0000000000..c020ece512
--- /dev/null
+++ b/cg/services/orders/validation/service.py
@@ -0,0 +1,101 @@
+import logging
+
+from cg.exc import OrderError as OrderValidationError
+from cg.models.orders.constants import OrderType
+from cg.services.orders.validation.errors.case_errors import CaseError
+from cg.services.orders.validation.errors.case_sample_errors import CaseSampleError
+from cg.services.orders.validation.errors.order_errors import OrderError
+from cg.services.orders.validation.errors.sample_errors import SampleError
+from cg.services.orders.validation.errors.validation_errors import ValidationErrors
+from cg.services.orders.validation.model_validator.model_validator import ModelValidator
+from cg.services.orders.validation.models.order import Order
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.order_type_maps import (
+ ORDER_TYPE_MODEL_MAP,
+ ORDER_TYPE_RULE_SET_MAP,
+ RuleSet,
+)
+from cg.services.orders.validation.response_mapper import create_order_validation_response
+from cg.services.orders.validation.utils import (
+ apply_case_sample_validation,
+ apply_case_validation,
+ apply_order_validation,
+ apply_sample_validation,
+)
+from cg.store.store import Store
+
+LOG = logging.getLogger(__name__)
+
+
+class OrderValidationService:
+ def __init__(self, store: Store):
+ self.store = store
+
+ def get_validation_response(self, raw_order: dict, order_type: OrderType, user_id: int) -> dict:
+ model = ORDER_TYPE_MODEL_MAP[order_type]
+ rule_set = ORDER_TYPE_RULE_SET_MAP[order_type]
+ errors: ValidationErrors = self._get_errors(
+ raw_order=raw_order, model=model, rule_set=rule_set, user_id=user_id
+ )
+ return create_order_validation_response(raw_order=raw_order, errors=errors)
+
+ def parse_and_validate(self, raw_order: dict, order_type: OrderType, user_id: int) -> Order:
+ model = ORDER_TYPE_MODEL_MAP[order_type]
+ rule_set = ORDER_TYPE_RULE_SET_MAP[order_type]
+ parsed_order, errors = ModelValidator.validate(order=raw_order, model=model)
+ if parsed_order:
+ parsed_order._user_id = user_id
+ errors: ValidationErrors = self._get_rule_validation_errors(
+ order=parsed_order,
+ rule_set=rule_set,
+ )
+ if not errors.is_empty:
+ LOG.error(errors.get_error_message())
+ raise OrderValidationError(message="Order contained errors")
+ return parsed_order
+
+ def _get_errors(
+ self, raw_order: dict, model: type[Order], rule_set: RuleSet, user_id: int
+ ) -> ValidationErrors:
+ parsed_order, errors = ModelValidator.validate(order=raw_order, model=model)
+ if parsed_order:
+ parsed_order._user_id = user_id
+ errors: ValidationErrors = self._get_rule_validation_errors(
+ order=parsed_order, rule_set=rule_set
+ )
+ return errors
+
+ def _get_rule_validation_errors(self, order: Order, rule_set: RuleSet) -> ValidationErrors:
+ case_errors = []
+ case_sample_errors = []
+ sample_errors = []
+ order_errors: list[OrderError] = apply_order_validation(
+ rules=rule_set.order_rules,
+ order=order,
+ store=self.store,
+ )
+ if isinstance(order, OrderWithCases):
+ case_errors: list[CaseError] = apply_case_validation(
+ rules=rule_set.case_rules,
+ order=order,
+ store=self.store,
+ )
+ case_sample_errors: list[CaseSampleError] = apply_case_sample_validation(
+ rules=rule_set.case_sample_rules,
+ order=order,
+ store=self.store,
+ )
+ else:
+ sample_errors: list[SampleError] = apply_sample_validation(
+ rules=rule_set.sample_rules,
+ order=order,
+ store=self.store,
+ )
+
+ return ValidationErrors(
+ case_errors=case_errors,
+ case_sample_errors=case_sample_errors,
+ order_errors=order_errors,
+ sample_errors=sample_errors,
+ )
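
A hedged sketch of how the two entry points differ: parse_and_validate raises on any error, while get_validation_response maps the errors onto the raw order for display. The `store`, `raw_order` and `user_id` values are assumed from the calling context, and OrderType.FASTQ stands in for any valid order type:

    service = OrderValidationService(store=store)
    try:
        order = service.parse_and_validate(
            raw_order=raw_order, order_type=OrderType.FASTQ, user_id=user_id
        )
    except OrderValidationError:
        # Same rule failures, returned as an annotated order instead of raised.
        response: dict = service.get_validation_response(
            raw_order=raw_order, order_type=OrderType.FASTQ, user_id=user_id
        )
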
diff --git a/cg/services/orders/validation/utils.py b/cg/services/orders/validation/utils.py
new file mode 100644
index 0000000000..cc32c9905b
--- /dev/null
+++ b/cg/services/orders/validation/utils.py
@@ -0,0 +1,61 @@
+from typing import Callable
+
+from cg.models.orders.sample_base import ControlEnum
+from cg.services.orders.validation.constants import ElutionBuffer, ExtractionMethod
+from cg.services.orders.validation.errors.case_errors import CaseError
+from cg.services.orders.validation.errors.case_sample_errors import CaseSampleError
+from cg.services.orders.validation.errors.order_errors import OrderError
+from cg.services.orders.validation.errors.sample_errors import SampleError
+from cg.services.orders.validation.models.order import Order
+from cg.store.store import Store
+
+
+def apply_order_validation(rules: list[Callable], order: Order, store: Store) -> list[OrderError]:
+ errors: list[OrderError] = []
+ for rule in rules:
+ rule_errors: list[OrderError] = rule(order=order, store=store)
+ errors.extend(rule_errors)
+ return errors
+
+
+def apply_case_validation(rules: list[Callable], order: Order, store: Store) -> list[CaseError]:
+ errors: list[CaseError] = []
+ for rule in rules:
+ rule_errors: list[CaseError] = rule(order=order, store=store)
+ errors.extend(rule_errors)
+ return errors
+
+
+def apply_case_sample_validation(
+ rules: list[Callable], order: Order, store: Store
+) -> list[CaseSampleError]:
+ errors: list[CaseSampleError] = []
+ for rule in rules:
+ rule_errors: list[CaseSampleError] = rule(order=order, store=store)
+ errors.extend(rule_errors)
+ return errors
+
+
+def apply_sample_validation(rules: list[Callable], order: Order, store: Store) -> list[SampleError]:
+ errors: list[SampleError] = []
+ for rule in rules:
+ rule_errors: list[SampleError] = rule(order=order, store=store)
+ errors.extend(rule_errors)
+ return errors
+
+
+def parse_buffer(buffer: str | None) -> ElutionBuffer | None:
+ """Map free-text buffers starting with "Other" to ElutionBuffer.OTHER; pass other values through."""
+ return ElutionBuffer.OTHER if buffer and buffer.startswith("Other") else buffer
+
+
+def parse_control(control: ControlEnum | None) -> ControlEnum:
+ """Convert the control value into one of the Enum values if it's None."""
+ return control or ControlEnum.not_control
+
+
+def parse_extraction_method(extraction_method: str | None) -> ExtractionMethod:
+ """Map free-text methods starting with the MagNaPure 96 label to ExtractionMethod.MAGNAPURE_96."""
+ return (
+ ExtractionMethod.MAGNAPURE_96
+ if extraction_method and extraction_method.startswith(ExtractionMethod.MAGNAPURE_96)
+ else extraction_method
+ )
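
All four apply_* helpers share one shape: a rule is any callable taking the order and the store and returning a list of errors, so per-rule results simply concatenate. A minimal hypothetical rule showing the expected signature:

    # Hypothetical no-op rule; `order` and `store` are assumed from context.
    def my_order_rule(order: Order, store: Store) -> list[OrderError]:
        return []

    errors: list[OrderError] = apply_order_validation(rules=[my_order_rule], order=order, store=store)
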
diff --git a/cg/services/orders/validation/workflows/__init__.py b/cg/services/orders/validation/workflows/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cg/services/orders/validation/workflows/balsamic/constants.py b/cg/services/orders/validation/workflows/balsamic/constants.py
new file mode 100644
index 0000000000..2ece862334
--- /dev/null
+++ b/cg/services/orders/validation/workflows/balsamic/constants.py
@@ -0,0 +1,13 @@
+from enum import StrEnum
+
+from cg.constants import DataDelivery
+
+
+class BalsamicDeliveryType(StrEnum):
+ ANALYSIS = DataDelivery.ANALYSIS_FILES
+ ANALYSIS_SCOUT = DataDelivery.ANALYSIS_SCOUT
+ FASTQ_ANALYSIS = DataDelivery.FASTQ_ANALYSIS
+ FASTQ_SCOUT = DataDelivery.FASTQ_SCOUT
+ FASTQ_ANALYSIS_SCOUT = DataDelivery.FASTQ_ANALYSIS_SCOUT
+ SCOUT = DataDelivery.SCOUT
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/balsamic/models/case.py b/cg/services/orders/validation/workflows/balsamic/models/case.py
new file mode 100644
index 0000000000..03cda79259
--- /dev/null
+++ b/cg/services/orders/validation/workflows/balsamic/models/case.py
@@ -0,0 +1,16 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.case import Case
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.workflows.balsamic.models.sample import BalsamicSample
+
+NewSample = Annotated[BalsamicSample, Tag("new")]
+OldSample = Annotated[ExistingSample, Tag("existing")]
+
+
+class BalsamicCase(Case):
+ cohorts: list[str] | None = None
+ samples: list[Annotated[NewSample | OldSample, Discriminator(has_internal_id)]]
+ synopsis: str | None = None
diff --git a/cg/services/orders/validation/workflows/balsamic/models/order.py b/cg/services/orders/validation/workflows/balsamic/models/order.py
new file mode 100644
index 0000000000..a8ca38de79
--- /dev/null
+++ b/cg/services/orders/validation/workflows/balsamic/models/order.py
@@ -0,0 +1,24 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_case import ExistingCase
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.workflows.balsamic.constants import BalsamicDeliveryType
+from cg.services.orders.validation.workflows.balsamic.models.case import BalsamicCase
+
+NewCase = Annotated[BalsamicCase, Tag("new")]
+OldCase = Annotated[ExistingCase, Tag("existing")]
+
+
+class BalsamicOrder(OrderWithCases):
+ cases: list[Annotated[NewCase | OldCase, Discriminator(has_internal_id)]]
+ delivery_type: BalsamicDeliveryType
+
+ @property
+ def enumerated_new_cases(self) -> list[tuple[int, BalsamicCase | ExistingCase]]:
+ cases: list[tuple[int, BalsamicCase | ExistingCase]] = []
+ for case_index, case in self.enumerated_cases:
+ if case.is_new:
+ cases.append((case_index, case))
+ return cases
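
The Tag/Discriminator pairing means pydantic routes each raw sample through the has_internal_id callable. Its real implementation lives in models/discriminators.py; from its usage here it plausibly behaves like this sketch (assumed, not the repository's actual code):

    # Dicts or objects carrying an internal_id parse as "existing" (ExistingSample),
    # everything else as "new" (e.g. BalsamicSample).
    def has_internal_id(sample: dict | object) -> str:
        if isinstance(sample, dict):
            return "existing" if sample.get("internal_id") else "new"
        return "existing" if getattr(sample, "internal_id", None) else "new"
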
diff --git a/cg/services/orders/validation/workflows/balsamic/models/sample.py b/cg/services/orders/validation/workflows/balsamic/models/sample.py
new file mode 100644
index 0000000000..dfa7ea6402
--- /dev/null
+++ b/cg/services/orders/validation/workflows/balsamic/models/sample.py
@@ -0,0 +1,28 @@
+from pydantic import BeforeValidator, Field
+from typing_extensions import Annotated
+
+from cg.models.orders.sample_base import NAME_PATTERN, ControlEnum, SexEnum, StatusEnum
+from cg.services.orders.validation.constants import ElutionBuffer, TissueBlockEnum
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_buffer, parse_control
+
+
+class BalsamicSample(Sample):
+ age_at_sampling: float | None = None
+ capture_kit: str | None = None
+ comment: str | None = None
+ concentration_ng_ul: float | None = None
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ elution_buffer: Annotated[ElutionBuffer | None, BeforeValidator(parse_buffer)] = None
+ formalin_fixation_time: int | None = None
+ phenotype_groups: list[str] | None = None
+ phenotype_terms: list[str] | None = None
+ post_formalin_fixation_time: int | None = None
+ require_qc_ok: bool = False
+ sex: SexEnum
+ source: str
+ status: StatusEnum | None = None
+ subject_id: str = Field(pattern=NAME_PATTERN, max_length=128)
+ tissue_block_size: TissueBlockEnum | None = None
+ tumour: bool = False
+ tumour_purity: int | None = None
diff --git a/cg/services/orders/validation/workflows/balsamic/validation_rules.py b/cg/services/orders/validation/workflows/balsamic/validation_rules.py
new file mode 100644
index 0000000000..140fcc2bf6
--- /dev/null
+++ b/cg/services/orders/validation/workflows/balsamic/validation_rules.py
@@ -0,0 +1,66 @@
+from cg.services.orders.validation.rules.case.rules import (
+ validate_at_most_two_samples_per_case,
+ validate_case_internal_ids_exist,
+ validate_case_names_available,
+ validate_case_names_not_repeated,
+ validate_existing_cases_belong_to_collaboration,
+ validate_number_of_normal_samples,
+)
+from cg.services.orders.validation.rules.case_sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_application_not_archived,
+ validate_buffer_required,
+ validate_buffer_skip_rc_condition,
+ validate_capture_kit_panel_requirement,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_existing_samples_belong_to_collaboration,
+ validate_sample_names_different_from_case_names,
+ validate_sample_names_not_repeated,
+ validate_samples_exist,
+ validate_subject_ids_different_from_case_names,
+ validate_subject_ids_different_from_sample_names,
+ validate_subject_sex_consistency,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+BALSAMIC_CASE_RULES: list[callable] = [
+ validate_at_most_two_samples_per_case,
+ validate_case_internal_ids_exist,
+ validate_case_names_available,
+ validate_case_names_not_repeated,
+ validate_existing_cases_belong_to_collaboration,
+ validate_number_of_normal_samples,
+]
+
+BALSAMIC_CASE_SAMPLE_RULES: list[callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_application_not_archived,
+ validate_buffer_required,
+ validate_buffer_skip_rc_condition,
+ validate_capture_kit_panel_requirement,
+ validate_volume_required,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_existing_samples_belong_to_collaboration,
+ validate_samples_exist,
+ validate_sample_names_different_from_case_names,
+ validate_sample_names_not_repeated,
+ validate_subject_sex_consistency,
+ validate_subject_ids_different_from_case_names,
+ validate_subject_ids_different_from_sample_names,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+]
diff --git a/cg/services/orders/validation/workflows/balsamic_umi/constants.py b/cg/services/orders/validation/workflows/balsamic_umi/constants.py
new file mode 100644
index 0000000000..a1837697ae
--- /dev/null
+++ b/cg/services/orders/validation/workflows/balsamic_umi/constants.py
@@ -0,0 +1,13 @@
+from enum import StrEnum
+
+from cg.constants import DataDelivery
+
+
+class BalsamicUmiDeliveryType(StrEnum):
+ ANALYSIS = DataDelivery.ANALYSIS_FILES
+ ANALYSIS_SCOUT = DataDelivery.ANALYSIS_SCOUT
+ FASTQ_ANALYSIS = DataDelivery.FASTQ_ANALYSIS
+ FASTQ_ANALYSIS_SCOUT = DataDelivery.FASTQ_ANALYSIS_SCOUT
+ FASTQ_SCOUT = DataDelivery.FASTQ_SCOUT
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
+ SCOUT = DataDelivery.SCOUT
diff --git a/cg/services/orders/validation/workflows/balsamic_umi/models/case.py b/cg/services/orders/validation/workflows/balsamic_umi/models/case.py
new file mode 100644
index 0000000000..9452bec2bc
--- /dev/null
+++ b/cg/services/orders/validation/workflows/balsamic_umi/models/case.py
@@ -0,0 +1,14 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.workflows.balsamic.models.case import BalsamicCase
+from cg.services.orders.validation.workflows.balsamic_umi.models.sample import BalsamicUmiSample
+
+NewSample = Annotated[BalsamicUmiSample, Tag("new")]
+OldSample = Annotated[ExistingSample, Tag("existing")]
+
+
+class BalsamicUmiCase(BalsamicCase):
+ samples: list[Annotated[NewSample | OldSample, Discriminator(has_internal_id)]]
diff --git a/cg/services/orders/validation/workflows/balsamic_umi/models/order.py b/cg/services/orders/validation/workflows/balsamic_umi/models/order.py
new file mode 100644
index 0000000000..b327ee6cfd
--- /dev/null
+++ b/cg/services/orders/validation/workflows/balsamic_umi/models/order.py
@@ -0,0 +1,24 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_case import ExistingCase
+from cg.services.orders.validation.workflows.balsamic.models.order import BalsamicOrder
+from cg.services.orders.validation.workflows.balsamic_umi.constants import BalsamicUmiDeliveryType
+from cg.services.orders.validation.workflows.balsamic_umi.models.case import BalsamicUmiCase
+
+NewCase = Annotated[BalsamicUmiCase, Tag("new")]
+OldCase = Annotated[ExistingCase, Tag("existing")]
+
+
+class BalsamicUmiOrder(BalsamicOrder):
+ cases: list[Annotated[NewCase | OldCase, Discriminator(has_internal_id)]]
+ delivery_type: BalsamicUmiDeliveryType
+
+ @property
+ def enumerated_new_cases(self) -> list[tuple[int, BalsamicUmiCase | ExistingCase]]:
+ cases: list[tuple[int, BalsamicUmiCase | ExistingCase]] = []
+ for case_index, case in self.enumerated_cases:
+ if case.is_new:
+ cases.append((case_index, case))
+ return cases
diff --git a/cg/services/orders/validation/workflows/balsamic_umi/models/sample.py b/cg/services/orders/validation/workflows/balsamic_umi/models/sample.py
new file mode 100644
index 0000000000..9a325780bd
--- /dev/null
+++ b/cg/services/orders/validation/workflows/balsamic_umi/models/sample.py
@@ -0,0 +1,5 @@
+from cg.services.orders.validation.workflows.balsamic.models.sample import BalsamicSample
+
+
+class BalsamicUmiSample(BalsamicSample):
+ pass
diff --git a/cg/services/orders/validation/workflows/balsamic_umi/validation_rules.py b/cg/services/orders/validation/workflows/balsamic_umi/validation_rules.py
new file mode 100644
index 0000000000..afd21d659d
--- /dev/null
+++ b/cg/services/orders/validation/workflows/balsamic_umi/validation_rules.py
@@ -0,0 +1,7 @@
+from cg.services.orders.validation.workflows.balsamic.validation_rules import (
+ BALSAMIC_CASE_RULES,
+ BALSAMIC_CASE_SAMPLE_RULES,
+)
+
+BALSAMIC_UMI_CASE_RULES: list[callable] = BALSAMIC_CASE_RULES.copy()
+BALSAMIC_UMI_CASE_SAMPLE_RULES: list[callable] = BALSAMIC_CASE_SAMPLE_RULES.copy()
diff --git a/cg/services/orders/validation/workflows/fastq/constants.py b/cg/services/orders/validation/workflows/fastq/constants.py
new file mode 100644
index 0000000000..1e9c70dc42
--- /dev/null
+++ b/cg/services/orders/validation/workflows/fastq/constants.py
@@ -0,0 +1,8 @@
+from enum import StrEnum
+
+from cg.constants import DataDelivery
+
+
+class FastqDeliveryType(StrEnum):
+ FASTQ = DataDelivery.FASTQ
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/fastq/models/order.py b/cg/services/orders/validation/workflows/fastq/models/order.py
new file mode 100644
index 0000000000..b4acfcf245
--- /dev/null
+++ b/cg/services/orders/validation/workflows/fastq/models/order.py
@@ -0,0 +1,12 @@
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.services.orders.validation.workflows.fastq.constants import FastqDeliveryType
+from cg.services.orders.validation.workflows.fastq.models.sample import FastqSample
+
+
+class FastqOrder(OrderWithSamples):
+ delivery_type: FastqDeliveryType
+ samples: list[FastqSample]
+
+ @property
+ def enumerated_samples(self) -> enumerate[FastqSample]:
+ return enumerate(self.samples)
diff --git a/cg/services/orders/validation/workflows/fastq/models/sample.py b/cg/services/orders/validation/workflows/fastq/models/sample.py
new file mode 100644
index 0000000000..ae77340424
--- /dev/null
+++ b/cg/services/orders/validation/workflows/fastq/models/sample.py
@@ -0,0 +1,20 @@
+from pydantic import BeforeValidator, Field
+from typing_extensions import Annotated
+
+from cg.models.orders.sample_base import NAME_PATTERN, PriorityEnum, SexEnum
+from cg.services.orders.validation.constants import ElutionBuffer
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_buffer
+
+
+class FastqSample(Sample):
+ capture_kit: str | None = None
+ concentration_ng_ul: float | None = None
+ elution_buffer: Annotated[ElutionBuffer, BeforeValidator(parse_buffer)]
+ priority: PriorityEnum
+ quantity: int | None = None
+ require_qc_ok: bool = False
+ sex: SexEnum
+ source: str
+ subject_id: str = Field(pattern=NAME_PATTERN, max_length=128)
+ tumour: bool = False
diff --git a/cg/services/orders/validation/workflows/fastq/validation_rules.py b/cg/services/orders/validation/workflows/fastq/validation_rules.py
new file mode 100644
index 0000000000..a93fa75f61
--- /dev/null
+++ b/cg/services/orders/validation/workflows/fastq/validation_rules.py
@@ -0,0 +1,35 @@
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_buffer_skip_rc_condition,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+FASTQ_SAMPLE_RULES: list[callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_buffer_skip_rc_condition,
+ validate_concentration_required_if_skip_rc,
+ validate_concentration_interval_if_skip_rc,
+ validate_container_name_required,
+ validate_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_wells_contain_at_most_one_sample,
+ validate_well_position_format,
+ validate_well_positions_required,
+]
diff --git a/cg/services/orders/validation/workflows/fluffy/constants.py b/cg/services/orders/validation/workflows/fluffy/constants.py
new file mode 100644
index 0000000000..5ccdca7e2c
--- /dev/null
+++ b/cg/services/orders/validation/workflows/fluffy/constants.py
@@ -0,0 +1,8 @@
+from enum import StrEnum
+
+from cg.constants import DataDelivery
+
+
+class FluffyDeliveryType(StrEnum):
+ STATINA = DataDelivery.STATINA
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/fluffy/models/order.py b/cg/services/orders/validation/workflows/fluffy/models/order.py
new file mode 100644
index 0000000000..d20c3afa2b
--- /dev/null
+++ b/cg/services/orders/validation/workflows/fluffy/models/order.py
@@ -0,0 +1,34 @@
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.services.orders.validation.workflows.fluffy.constants import FluffyDeliveryType
+from cg.services.orders.validation.workflows.fluffy.models.sample import FluffySample
+
+
+class FluffyOrder(OrderWithSamples):
+ delivery_type: FluffyDeliveryType
+ samples: list[FluffySample]
+
+ @property
+ def enumerated_samples(self) -> enumerate[FluffySample]:
+ return enumerate(self.samples)
+
+ @property
+ def pools(self) -> dict[str, list[FluffySample]]:
+ """Return a dictionary matching pool names and their respective samples."""
+ pools: dict[str, list[FluffySample]] = {}
+ for sample in self.samples:
+ if sample.pool not in pools:
+ pools[sample.pool] = [sample]
+ else:
+ pools[sample.pool].append(sample)
+ return pools
+
+ @property
+ def enumerated_pools(self) -> dict[str, list[tuple[int, FluffySample]]]:
+ """Return the pool dictionary with indexes for the samples to map them to validation errors."""
+ pools: dict[str, list[tuple[int, FluffySample]]] = {}
+ for sample_index, sample in self.enumerated_samples:
+ if sample.pool not in pools:
+ pools[sample.pool] = [(sample_index, sample)]
+ else:
+ pools[sample.pool].append((sample_index, sample))
+ return pools
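
An equivalent formulation of the grouping above using dict.setdefault, shown only to make the result shape explicit; `order` is assumed to be a parsed FluffyOrder:

    # Result shape: {"pool_name": [(sample_index, sample), ...]}
    pools: dict[str, list[tuple[int, FluffySample]]] = {}
    for sample_index, sample in order.enumerated_samples:
        pools.setdefault(sample.pool, []).append((sample_index, sample))
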
diff --git a/cg/services/orders/validation/workflows/fluffy/models/sample.py b/cg/services/orders/validation/workflows/fluffy/models/sample.py
new file mode 100644
index 0000000000..c035275300
--- /dev/null
+++ b/cg/services/orders/validation/workflows/fluffy/models/sample.py
@@ -0,0 +1,39 @@
+import logging
+
+from pydantic import BeforeValidator, Field, model_validator
+from typing_extensions import Annotated
+
+from cg.models.orders.sample_base import ContainerEnum, ControlEnum, PriorityEnum
+from cg.services.orders.validation.constants import IndexEnum
+from cg.services.orders.validation.index_sequences import INDEX_SEQUENCES
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_control
+
+LOG = logging.getLogger(__name__)
+
+
+class FluffySample(Sample):
+ concentration: float
+ concentration_sample: float | None = None
+ container: ContainerEnum | None = Field(default=None, exclude=True)
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ priority: PriorityEnum
+ index: IndexEnum
+ index_number: int | None = None
+ index_sequence: str | None = None
+ pool: str
+ rml_plate_name: str | None = None
+ volume: int
+ well_position_rml: str | None = None
+
+ @model_validator(mode="after")
+ def set_default_index_sequence(self) -> "FluffySample":
+ """Set a default index_sequence from the index and index_number."""
+ if not self.index_sequence and (self.index and self.index_number):
+ try:
+ self.index_sequence = INDEX_SEQUENCES[self.index][self.index_number - 1]
+ except (KeyError, IndexError):
+ LOG.warning(
+ f"No index sequence set and no suitable sequence found for index {self.index}, number {self.index_number}"
+ )
+ return self
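
The same (index, index_number) lookup drives both this default and the mismatch rule: index numbers are 1-based, so entry N sits at list position N - 1. A one-line illustration, assuming a parsed sample named `sample`:

    # The expected sequence for index number 2 is the second entry
    # registered under that index in INDEX_SEQUENCES.
    expected: str = INDEX_SEQUENCES[sample.index][sample.index_number - 1]
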
diff --git a/cg/services/orders/validation/workflows/fluffy/validation_rules.py b/cg/services/orders/validation/workflows/fluffy/validation_rules.py
new file mode 100644
index 0000000000..4d828907d5
--- /dev/null
+++ b/cg/services/orders/validation/workflows/fluffy/validation_rules.py
@@ -0,0 +1,37 @@
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_index_number_in_range,
+ validate_index_number_required,
+ validate_index_sequence_mismatch,
+ validate_index_sequence_required,
+ validate_pools_contain_one_application,
+ validate_pools_contain_one_priority,
+ validate_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_rml_format,
+ validate_well_positions_required_rml,
+)
+
+FLUFFY_SAMPLE_RULES: list[callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_index_number_required,
+ validate_index_number_in_range,
+ validate_index_sequence_required,
+ validate_index_sequence_mismatch,
+ validate_pools_contain_one_application,
+ validate_pools_contain_one_priority,
+ validate_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_rml_format,
+ validate_well_positions_required_rml,
+]
diff --git a/cg/services/orders/validation/workflows/metagenome/constants.py b/cg/services/orders/validation/workflows/metagenome/constants.py
new file mode 100644
index 0000000000..c6ba881a3a
--- /dev/null
+++ b/cg/services/orders/validation/workflows/metagenome/constants.py
@@ -0,0 +1,8 @@
+from enum import StrEnum
+
+from cg.constants import DataDelivery
+
+
+class MetagenomeDeliveryType(StrEnum):
+ FASTQ = DataDelivery.FASTQ
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/metagenome/models/order.py b/cg/services/orders/validation/workflows/metagenome/models/order.py
new file mode 100644
index 0000000000..43a2417700
--- /dev/null
+++ b/cg/services/orders/validation/workflows/metagenome/models/order.py
@@ -0,0 +1,12 @@
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.services.orders.validation.workflows.metagenome.constants import MetagenomeDeliveryType
+from cg.services.orders.validation.workflows.metagenome.models.sample import MetagenomeSample
+
+
+class MetagenomeOrder(OrderWithSamples):
+ delivery_type: MetagenomeDeliveryType
+ samples: list[MetagenomeSample]
+
+ @property
+ def enumerated_samples(self) -> enumerate[MetagenomeSample]:
+ return enumerate(self.samples)
diff --git a/cg/services/orders/validation/workflows/metagenome/models/sample.py b/cg/services/orders/validation/workflows/metagenome/models/sample.py
new file mode 100644
index 0000000000..75fbb4dc76
--- /dev/null
+++ b/cg/services/orders/validation/workflows/metagenome/models/sample.py
@@ -0,0 +1,17 @@
+from pydantic import BeforeValidator
+from typing_extensions import Annotated
+
+from cg.models.orders.sample_base import ControlEnum, PriorityEnum
+from cg.services.orders.validation.constants import ElutionBuffer
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_buffer, parse_control
+
+
+class MetagenomeSample(Sample):
+ concentration_ng_ul: float | None = None
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ elution_buffer: Annotated[ElutionBuffer, BeforeValidator(parse_buffer)]
+ priority: PriorityEnum
+ quantity: int | None = None
+ require_qc_ok: bool = False
+ source: str
diff --git a/cg/services/orders/validation/workflows/metagenome/validation_rules.py b/cg/services/orders/validation/workflows/metagenome/validation_rules.py
new file mode 100644
index 0000000000..0b37ea183d
--- /dev/null
+++ b/cg/services/orders/validation/workflows/metagenome/validation_rules.py
@@ -0,0 +1,29 @@
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_non_control_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+METAGENOME_SAMPLE_RULES: list[callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_non_control_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+]
diff --git a/cg/services/orders/validation/workflows/microbial_fastq/constants.py b/cg/services/orders/validation/workflows/microbial_fastq/constants.py
new file mode 100644
index 0000000000..ad911d46cf
--- /dev/null
+++ b/cg/services/orders/validation/workflows/microbial_fastq/constants.py
@@ -0,0 +1,8 @@
+from enum import StrEnum
+
+from cg.constants import DataDelivery
+
+
+class MicrobialFastqDeliveryType(StrEnum):
+ FASTQ = DataDelivery.FASTQ
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/microbial_fastq/models/order.py b/cg/services/orders/validation/workflows/microbial_fastq/models/order.py
new file mode 100644
index 0000000000..82aff7470f
--- /dev/null
+++ b/cg/services/orders/validation/workflows/microbial_fastq/models/order.py
@@ -0,0 +1,16 @@
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.services.orders.validation.workflows.microbial_fastq.constants import (
+ MicrobialFastqDeliveryType,
+)
+from cg.services.orders.validation.workflows.microbial_fastq.models.sample import (
+ MicrobialFastqSample,
+)
+
+
+class MicrobialFastqOrder(OrderWithSamples):
+ delivery_type: MicrobialFastqDeliveryType
+ samples: list[MicrobialFastqSample]
+
+ @property
+ def enumerated_samples(self) -> enumerate[MicrobialFastqSample]:
+ return enumerate(self.samples)
diff --git a/cg/services/orders/validation/workflows/microbial_fastq/models/sample.py b/cg/services/orders/validation/workflows/microbial_fastq/models/sample.py
new file mode 100644
index 0000000000..ab74f89885
--- /dev/null
+++ b/cg/services/orders/validation/workflows/microbial_fastq/models/sample.py
@@ -0,0 +1,15 @@
+from pydantic import BeforeValidator
+from typing_extensions import Annotated
+
+from cg.models.orders.sample_base import ControlEnum, PriorityEnum
+from cg.services.orders.validation.constants import ElutionBuffer
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_buffer, parse_control
+
+
+class MicrobialFastqSample(Sample):
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ elution_buffer: Annotated[ElutionBuffer, BeforeValidator(parse_buffer)]
+ priority: PriorityEnum
+ quantity: int | None = None
+ volume: int
diff --git a/cg/services/orders/validation/workflows/microbial_fastq/validation_rules.py b/cg/services/orders/validation/workflows/microbial_fastq/validation_rules.py
new file mode 100644
index 0000000000..7e57856714
--- /dev/null
+++ b/cg/services/orders/validation/workflows/microbial_fastq/validation_rules.py
@@ -0,0 +1,29 @@
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_non_control_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+MICROBIAL_FASTQ_SAMPLE_RULES: list[callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_non_control_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_wells_contain_at_most_one_sample,
+ validate_well_position_format,
+ validate_well_positions_required,
+]
diff --git a/cg/services/orders/validation/workflows/microsalt/constants.py b/cg/services/orders/validation/workflows/microsalt/constants.py
new file mode 100644
index 0000000000..bda53c6c74
--- /dev/null
+++ b/cg/services/orders/validation/workflows/microsalt/constants.py
@@ -0,0 +1,9 @@
+from enum import StrEnum
+
+from cg.constants import DataDelivery
+
+
+class MicrosaltDeliveryType(StrEnum):
+ FASTQ_QC = DataDelivery.FASTQ_QC
+ FASTQ_QC_ANALYSIS = DataDelivery.FASTQ_QC_ANALYSIS
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/microsalt/models/order.py b/cg/services/orders/validation/workflows/microsalt/models/order.py
new file mode 100644
index 0000000000..4b16e17686
--- /dev/null
+++ b/cg/services/orders/validation/workflows/microsalt/models/order.py
@@ -0,0 +1,12 @@
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.services.orders.validation.workflows.microsalt.constants import MicrosaltDeliveryType
+from cg.services.orders.validation.workflows.microsalt.models.sample import MicrosaltSample
+
+
+class MicrosaltOrder(OrderWithSamples):
+ delivery_type: MicrosaltDeliveryType
+ samples: list[MicrosaltSample]
+
+ @property
+ def enumerated_samples(self) -> enumerate[MicrosaltSample]:
+ return enumerate(self.samples)
diff --git a/cg/services/orders/validation/workflows/microsalt/models/sample.py b/cg/services/orders/validation/workflows/microsalt/models/sample.py
new file mode 100644
index 0000000000..86a49f1082
--- /dev/null
+++ b/cg/services/orders/validation/workflows/microsalt/models/sample.py
@@ -0,0 +1,23 @@
+from pydantic import BeforeValidator, Field, PrivateAttr
+from typing_extensions import Annotated
+
+from cg.models.orders.sample_base import ControlEnum, PriorityEnum
+from cg.services.orders.validation.constants import ElutionBuffer, ExtractionMethod
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_buffer, parse_control, parse_extraction_method
+
+
+class MicrosaltSample(Sample):
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ elution_buffer: Annotated[ElutionBuffer, BeforeValidator(parse_buffer)]
+ extraction_method: Annotated[ExtractionMethod, BeforeValidator(parse_extraction_method)]
+ organism: str
+ organism_other: str | None = None
+ priority: PriorityEnum
+ reference_genome: str = Field(max_length=255)
+ _verified_organism: bool | None = PrivateAttr(default=None)
+
+ def model_dump(self, **kwargs) -> dict:
+ """Include the private _verified_organism attribute in the serialised sample."""
+ data = super().model_dump(**kwargs)
+ data["verified_organism"] = self._verified_organism
+ return data
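
The override exists because pydantic never includes PrivateAttr values in model_dump(); the flag is therefore re-attached by hand. A sketch, assuming a parsed MicrosaltSample named `sample` whose flag is set elsewhere in the service:

    sample._verified_organism = True  # assumed to be set by an organism-verification step
    data: dict = sample.model_dump()
    assert data["verified_organism"] is True
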
diff --git a/cg/services/orders/validation/workflows/microsalt/validation_rules.py b/cg/services/orders/validation/workflows/microsalt/validation_rules.py
new file mode 100644
index 0000000000..9a6770d5fc
--- /dev/null
+++ b/cg/services/orders/validation/workflows/microsalt/validation_rules.py
@@ -0,0 +1,29 @@
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_non_control_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+MICROSALT_SAMPLE_RULES: list[callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_non_control_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+]
diff --git a/cg/services/orders/validation/workflows/mip_dna/models/case.py b/cg/services/orders/validation/workflows/mip_dna/models/case.py
new file mode 100644
index 0000000000..1a77df2902
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mip_dna/models/case.py
@@ -0,0 +1,23 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.case import Case
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.workflows.mip_dna.models.sample import MipDnaSample
+
+NewSample = Annotated[MipDnaSample, Tag("new")]
+OldSample = Annotated[ExistingSample, Tag("existing")]
+
+
+class MipDnaCase(Case):
+ cohorts: list[str] | None = None
+ panels: list[str]
+ synopsis: str | None = None
+ samples: list[Annotated[NewSample | OldSample, Discriminator(has_internal_id)]]
+
+ def get_samples_with_father(self) -> list[tuple[MipDnaSample, int]]:
+ return [(sample, index) for index, sample in self.enumerated_samples if sample.father]
+
+ def get_samples_with_mother(self) -> list[tuple[MipDnaSample, int]]:
+ return [(sample, index) for index, sample in self.enumerated_samples if sample.mother]
diff --git a/cg/services/orders/validation/workflows/mip_dna/models/order.py b/cg/services/orders/validation/workflows/mip_dna/models/order.py
new file mode 100644
index 0000000000..9a35c17590
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mip_dna/models/order.py
@@ -0,0 +1,14 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_case import ExistingCase
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.workflows.mip_dna.models.case import MipDnaCase
+
+NewCase = Annotated[MipDnaCase, Tag("new")]
+OldCase = Annotated[ExistingCase, Tag("existing")]
+
+
+class MipDnaOrder(OrderWithCases):
+ cases: list[Annotated[NewCase | OldCase, Discriminator(has_internal_id)]]
diff --git a/cg/services/orders/validation/workflows/mip_dna/models/sample.py b/cg/services/orders/validation/workflows/mip_dna/models/sample.py
new file mode 100644
index 0000000000..8b761a0d3f
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mip_dna/models/sample.py
@@ -0,0 +1,26 @@
+from pydantic import BeforeValidator, Field
+from typing_extensions import Annotated
+
+from cg.models.orders.sample_base import NAME_PATTERN, ControlEnum, SexEnum, StatusEnum
+from cg.services.orders.validation.constants import ElutionBuffer, TissueBlockEnum
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_buffer, parse_control
+
+
+class MipDnaSample(Sample):
+ age_at_sampling: float | None = None
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ elution_buffer: Annotated[ElutionBuffer | None, BeforeValidator(parse_buffer)] = None
+ father: str | None = Field(None, pattern=NAME_PATTERN)
+ formalin_fixation_time: int | None = None
+ mother: str | None = Field(None, pattern=NAME_PATTERN)
+ phenotype_groups: list[str] | None = None
+ phenotype_terms: list[str] | None = None
+ post_formalin_fixation_time: int | None = None
+ require_qc_ok: bool = False
+ sex: SexEnum
+ source: str
+ status: StatusEnum
+ subject_id: str = Field(pattern=NAME_PATTERN, max_length=128)
+ tissue_block_size: TissueBlockEnum | None = None
+ concentration_ng_ul: float | None = None
diff --git a/cg/services/orders/validation/workflows/mip_dna/validation_rules.py b/cg/services/orders/validation/workflows/mip_dna/validation_rules.py
new file mode 100644
index 0000000000..2f3805af35
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mip_dna/validation_rules.py
@@ -0,0 +1,76 @@
+from cg.services.orders.validation.rules.case.rules import (
+ validate_case_internal_ids_exist,
+ validate_case_names_available,
+ validate_case_names_not_repeated,
+ validate_existing_cases_belong_to_collaboration,
+ validate_gene_panels_unique,
+)
+from cg.services.orders.validation.rules.case_sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_application_not_archived,
+ validate_buffer_required,
+ validate_buffer_skip_rc_condition,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_existing_samples_belong_to_collaboration,
+ validate_fathers_are_male,
+ validate_fathers_in_same_case_as_children,
+ validate_gene_panels_exist,
+ validate_mothers_are_female,
+ validate_mothers_in_same_case_as_children,
+ validate_not_all_samples_unknown_in_case,
+ validate_pedigree,
+ validate_sample_names_different_from_case_names,
+ validate_sample_names_not_repeated,
+ validate_samples_exist,
+ validate_subject_ids_different_from_case_names,
+ validate_subject_ids_different_from_sample_names,
+ validate_subject_sex_consistency,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+MIP_DNA_CASE_RULES: list[callable] = [
+ validate_case_internal_ids_exist,
+ validate_case_names_available,
+ validate_case_names_not_repeated,
+ validate_existing_cases_belong_to_collaboration,
+ validate_gene_panels_exist,
+ validate_gene_panels_unique,
+]
+
+MIP_DNA_CASE_SAMPLE_RULES: list[callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_application_not_archived,
+ validate_buffer_required,
+ validate_buffer_skip_rc_condition,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_existing_samples_belong_to_collaboration,
+ validate_fathers_are_male,
+ validate_fathers_in_same_case_as_children,
+ validate_mothers_are_female,
+ validate_mothers_in_same_case_as_children,
+ validate_not_all_samples_unknown_in_case,
+ validate_pedigree,
+ validate_samples_exist,
+ validate_sample_names_different_from_case_names,
+ validate_sample_names_not_repeated,
+ validate_subject_ids_different_from_case_names,
+ validate_subject_ids_different_from_sample_names,
+ validate_subject_sex_consistency,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+ validate_well_position_format,
+]
diff --git a/cg/services/orders/validation/workflows/mip_rna/constants.py b/cg/services/orders/validation/workflows/mip_rna/constants.py
new file mode 100644
index 0000000000..9f1c768ae7
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mip_rna/constants.py
@@ -0,0 +1,14 @@
+from enum import StrEnum
+
+from cg.constants import DataDelivery
+
+
+class MipRnaDeliveryType(StrEnum):
+ ANALYSIS = DataDelivery.ANALYSIS_FILES
+ ANALYSIS_SCOUT = DataDelivery.ANALYSIS_SCOUT
+ SCOUT = DataDelivery.SCOUT
+ FASTQ = DataDelivery.FASTQ
+ FASTQ_ANALYSIS = DataDelivery.FASTQ_ANALYSIS
+ FASTQ_SCOUT = DataDelivery.FASTQ_SCOUT
+ FASTQ_ANALYSIS_SCOUT = DataDelivery.FASTQ_ANALYSIS_SCOUT
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/mip_rna/models/case.py b/cg/services/orders/validation/workflows/mip_rna/models/case.py
new file mode 100644
index 0000000000..0214ba1ded
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mip_rna/models/case.py
@@ -0,0 +1,16 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.case import Case
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.workflows.mip_rna.models.sample import MipRnaSample
+
+NewSample = Annotated[MipRnaSample, Tag("new")]
+OldSample = Annotated[ExistingSample, Tag("existing")]
+
+
+class MipRnaCase(Case):
+ cohorts: list[str] | None = None
+ synopsis: str | None = None
+ samples: list[Annotated[NewSample | OldSample, Discriminator(has_internal_id)]]
diff --git a/cg/services/orders/validation/workflows/mip_rna/models/order.py b/cg/services/orders/validation/workflows/mip_rna/models/order.py
new file mode 100644
index 0000000000..3a009d3234
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mip_rna/models/order.py
@@ -0,0 +1,16 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_case import ExistingCase
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.workflows.mip_rna.constants import MipRnaDeliveryType
+from cg.services.orders.validation.workflows.mip_rna.models.case import MipRnaCase
+
+NewCase = Annotated[MipRnaCase, Tag("new")]
+OldCase = Annotated[ExistingCase, Tag("existing")]
+
+
+class MipRnaOrder(OrderWithCases):
+ cases: list[Annotated[NewCase | OldCase, Discriminator(has_internal_id)]]
+ delivery_type: MipRnaDeliveryType
diff --git a/cg/services/orders/validation/workflows/mip_rna/models/sample.py b/cg/services/orders/validation/workflows/mip_rna/models/sample.py
new file mode 100644
index 0000000000..b0cdb90ec3
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mip_rna/models/sample.py
@@ -0,0 +1,23 @@
+from pydantic import BeforeValidator, Field
+from typing_extensions import Annotated
+
+from cg.models.orders.sample_base import NAME_PATTERN, ControlEnum, SexEnum
+from cg.services.orders.validation.constants import ElutionBuffer, TissueBlockEnum
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_buffer, parse_control
+
+
+class MipRnaSample(Sample):
+ age_at_sampling: float | None = None
+ concentration_ng_ul: float | None = None
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ elution_buffer: Annotated[ElutionBuffer | None, BeforeValidator(parse_buffer)] = None
+ formalin_fixation_time: int | None = None
+ phenotype_groups: list[str] | None = None
+ phenotype_terms: list[str] | None = None
+ post_formalin_fixation_time: int | None = None
+ require_qc_ok: bool = False
+ sex: SexEnum
+ source: str
+ subject_id: str = Field(pattern=NAME_PATTERN, max_length=128)
+ tissue_block_size: TissueBlockEnum | None = None
diff --git a/cg/services/orders/validation/workflows/mip_rna/validation_rules.py b/cg/services/orders/validation/workflows/mip_rna/validation_rules.py
new file mode 100644
index 0000000000..d9a11bc1ba
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mip_rna/validation_rules.py
@@ -0,0 +1,58 @@
+from cg.services.orders.validation.rules.case.rules import (
+ validate_case_internal_ids_exist,
+ validate_case_names_available,
+ validate_case_names_not_repeated,
+ validate_existing_cases_belong_to_collaboration,
+)
+from cg.services.orders.validation.rules.case_sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_application_not_archived,
+ validate_buffer_required,
+ validate_buffer_skip_rc_condition,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_existing_samples_belong_to_collaboration,
+ validate_sample_names_different_from_case_names,
+ validate_sample_names_not_repeated,
+ validate_samples_exist,
+ validate_subject_ids_different_from_case_names,
+ validate_subject_ids_different_from_sample_names,
+ validate_subject_sex_consistency,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+MIP_RNA_CASE_RULES: list[callable] = [
+ validate_case_internal_ids_exist,
+ validate_case_names_available,
+ validate_case_names_not_repeated,
+ validate_existing_cases_belong_to_collaboration,
+]
+
+MIP_RNA_CASE_SAMPLE_RULES: list[callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_application_not_archived,
+ validate_buffer_required,
+ validate_buffer_skip_rc_condition,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_existing_samples_belong_to_collaboration,
+ validate_sample_names_different_from_case_names,
+ validate_sample_names_not_repeated,
+ validate_samples_exist,
+ validate_subject_ids_different_from_case_names,
+ validate_subject_ids_different_from_sample_names,
+ validate_subject_sex_consistency,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+]
diff --git a/cg/services/orders/validation/workflows/mutant/constants.py b/cg/services/orders/validation/workflows/mutant/constants.py
new file mode 100644
index 0000000000..8285b3f19f
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mutant/constants.py
@@ -0,0 +1,67 @@
+from enum import StrEnum
+
+from cg.constants import DataDelivery
+
+
+class MutantDeliveryType(StrEnum):
+ FASTQ_ANALYSIS = DataDelivery.FASTQ_ANALYSIS
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
+
+
+class PreProcessingMethod(StrEnum):
+ COVID_PRIMER = "Qiagen SARS-CoV-2 Primer Panel"
+ COVID_SEQUENCING = "COVIDSeq"
+ OTHER = 'Other (specify in "Comments")'
+
+
+class Primer(StrEnum):
+ ILLUMINA = "Illumina Artic V3"
+ NANOPORE = "Nanopore Midnight V1"
+
+
+class Region(StrEnum):
+ STOCKHOLM = "Stockholm"
+ UPPSALA = "Uppsala"
+ SORMLAND = "Sörmland"
+ OSTERGOTLAND = "Östergötland"
+ JONKOPINGS_LAN = "Jönköpings län"
+ KRONOBERG = "Kronoberg"
+ KALMAR_LAN = "Kalmar län"
+ GOTLAND = "Gotland"
+ BLEKINGE = "Blekinge"
+ SKANE = "Skåne"
+ HALLAND = "Halland"
+ VASTRA_GOTALANDSREGIONEN = "Västra Götalandsregionen"
+ VARMLAND = "Värmland"
+ OREBRO_LAN = "Örebro län"
+ VASTMANLAND = "Västmanland"
+ DALARNA = "Dalarna"
+ GAVLEBORG = "Gävleborg"
+ VASTERNORRLAND = "Västernorrland"
+ JAMTLAND_HARJEDALEN = "Jämtland Härjedalen"
+ VASTERBOTTEN = "Västerbotten"
+ NORRBOTTEN = "Norrbotten"
+
+
+class SelectionCriteria(StrEnum):
+ ALLMAN_OVERVAKNING = "Allmän övervakning"
+ ALLMAN_OVERVAKNING_OPPENVARD = "Allmän övervakning öppenvård"
+ ALLMAN_OVERVAKNING_SLUTENVARD = "Allmän övervakning slutenvård"
+ UTLANDSVISTELSE = "Utlandsvistelse"
+ RIKTAD_INSAMLING = "Riktad insamling"
+ UTBROTT = "Utbrott"
+ VACCINATIONSGENOMBROTT = "Vaccinationsgenombrott"
+ REINFEKTION = "Reinfektion"
+ INFORMATION_SAKNAS = "Information saknas"
+
+
+class OriginalLab(StrEnum):
+ UNILABS_STOCKHOLM = "Unilabs Stockholm"
+ UNILABS_ESKILSTUNA_LABORATORIUM = "Unilabs Eskilstuna Laboratorium"
+ NORRLAND_UNIVERSITY_HOSPITAL = "Norrland University Hospital"
+ LANSSJUKHUSET_SUNDSVALL = "Länssjukhuset Sundsvall"
+ A05_DIAGNOSTICS = "A05 Diagnostics"
+ SYNLAB_MEDILAB = "Synlab Medilab"
+ KAROLINSKA_UNIVERSITY_HOSPITAL_SOLNA = "Karolinska University Hospital Solna"
+ KAROLINSKA_UNIVERSITY_HOSPITAL_HUDDINGE = "Karolinska University Hospital Huddinge"
+ LABORATORIEMEDICINSKT_CENTRUM_GOTLAND = "LaboratorieMedicinskt Centrum Gotland"
diff --git a/cg/services/orders/validation/workflows/mutant/models/order.py b/cg/services/orders/validation/workflows/mutant/models/order.py
new file mode 100644
index 0000000000..5c47295e3e
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mutant/models/order.py
@@ -0,0 +1,12 @@
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.services.orders.validation.workflows.mutant.constants import MutantDeliveryType
+from cg.services.orders.validation.workflows.mutant.models.sample import MutantSample
+
+
+class MutantOrder(OrderWithSamples):
+ delivery_type: MutantDeliveryType
+ samples: list[MutantSample]
+
+ @property
+ def enumerated_samples(self) -> enumerate[MutantSample]:
+ return enumerate(self.samples)
diff --git a/cg/services/orders/validation/workflows/mutant/models/sample.py b/cg/services/orders/validation/workflows/mutant/models/sample.py
new file mode 100644
index 0000000000..2b0a972feb
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mutant/models/sample.py
@@ -0,0 +1,67 @@
+from datetime import date
+from typing import Any
+
+from pydantic import BeforeValidator, PrivateAttr, field_serializer, model_validator
+from typing_extensions import Annotated
+
+from cg.constants.orderforms import ORIGINAL_LAB_ADDRESSES, REGION_CODES
+from cg.models.orders.sample_base import ControlEnum, PriorityEnum
+from cg.services.orders.validation.constants import ElutionBuffer, ExtractionMethod
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_buffer, parse_control, parse_extraction_method
+from cg.services.orders.validation.workflows.mutant.constants import (
+ OriginalLab,
+ PreProcessingMethod,
+ Primer,
+ Region,
+ SelectionCriteria,
+)
+
+
+class MutantSample(Sample):
+ collection_date: date
+ concentration_sample: float | None = None
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ elution_buffer: Annotated[ElutionBuffer, BeforeValidator(parse_buffer)]
+ extraction_method: Annotated[ExtractionMethod, BeforeValidator(parse_extraction_method)]
+ _lab_code: str = PrivateAttr(default="SE100 Karolinska")
+ organism: str
+ organism_other: str | None = None
+ original_lab: OriginalLab
+ original_lab_address: str
+ pre_processing_method: PreProcessingMethod
+ primer: Primer
+ priority: PriorityEnum
+ quantity: int | None = None
+ reference_genome: str
+ region: Region
+ region_code: str
+ selection_criteria: SelectionCriteria
+ _verified_organism: bool | None = PrivateAttr(default=None)
+
+ @model_validator(mode="before")
+ @classmethod
+    def set_original_lab_address(cls, data: Any) -> Any:
+ if isinstance(data, dict):
+ is_set = bool(data.get("original_lab_address"))
+ if not is_set:
+ data["original_lab_address"] = ORIGINAL_LAB_ADDRESSES[data["original_lab"]]
+ return data
+
+ @model_validator(mode="before")
+ @classmethod
+    def set_region_code(cls, data: Any) -> Any:
+ if isinstance(data, dict):
+ is_set = bool(data.get("region_code"))
+ if not is_set:
+ data["region_code"] = REGION_CODES[data["region"]]
+ return data
+
+ @field_serializer("collection_date")
+ def serialize_collection_date(self, value: date) -> str:
+ return value.isoformat()
+
+ def model_dump(self, **kwargs) -> dict:
+ data = super().model_dump(**kwargs)
+ data["lab_code"] = self._lab_code
+ data["verified_organism"] = self._verified_organism
+ return data
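The two `mode="before"` validators above fill `original_lab_address` and `region_code` from lookup tables when the order form leaves them blank. A minimal, self-contained sketch of that defaulting pattern, with a hypothetical lookup standing in for ORIGINAL_LAB_ADDRESSES:

    from typing import Any

    from pydantic import BaseModel, model_validator

    # Hypothetical stand-in for ORIGINAL_LAB_ADDRESSES in cg.constants.orderforms.
    LAB_ADDRESSES: dict[str, str] = {"A05 Diagnostics": "Hypothetical address"}

    class LabSample(BaseModel):
        original_lab: str
        original_lab_address: str

        @model_validator(mode="before")
        @classmethod
        def set_address(cls, data: Any) -> Any:
            # Runs on the raw input before field validation, so an empty or
            # missing address can be defaulted from the lab name.
            if isinstance(data, dict) and not data.get("original_lab_address"):
                data["original_lab_address"] = LAB_ADDRESSES[data["original_lab"]]
            return data

    assert LabSample(original_lab="A05 Diagnostics").original_lab_address == "Hypothetical address"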
diff --git a/cg/services/orders/validation/workflows/mutant/validation_rules.py b/cg/services/orders/validation/workflows/mutant/validation_rules.py
new file mode 100644
index 0000000000..5132a29895
--- /dev/null
+++ b/cg/services/orders/validation/workflows/mutant/validation_rules.py
@@ -0,0 +1,29 @@
+from typing import Callable
+
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_non_control_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+MUTANT_SAMPLE_RULES: list[Callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_non_control_sample_names_available,
+ validate_volume_required,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+]
diff --git a/cg/services/orders/validation/workflows/order_validation_rules.py b/cg/services/orders/validation/workflows/order_validation_rules.py
new file mode 100644
index 0000000000..cb876ebbe6
--- /dev/null
+++ b/cg/services/orders/validation/workflows/order_validation_rules.py
@@ -0,0 +1,11 @@
+from typing import Callable
+
+from cg.services.orders.validation.rules.order.rules import (
+ validate_customer_can_skip_reception_control,
+ validate_customer_exists,
+ validate_user_belongs_to_customer,
+)
+
+ORDER_RULES: list[Callable] = [
+ validate_customer_can_skip_reception_control,
+ validate_customer_exists,
+ validate_user_belongs_to_customer,
+]
diff --git a/cg/services/orders/validation/workflows/pacbio_long_read/constants.py b/cg/services/orders/validation/workflows/pacbio_long_read/constants.py
new file mode 100644
index 0000000000..c6076fc5de
--- /dev/null
+++ b/cg/services/orders/validation/workflows/pacbio_long_read/constants.py
@@ -0,0 +1,8 @@
+from enum import StrEnum
+
+from cg.constants import DataDelivery
+
+
+class PacbioDeliveryType(StrEnum):
+ BAM = DataDelivery.BAM
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/pacbio_long_read/models/order.py b/cg/services/orders/validation/workflows/pacbio_long_read/models/order.py
new file mode 100644
index 0000000000..21139caa4e
--- /dev/null
+++ b/cg/services/orders/validation/workflows/pacbio_long_read/models/order.py
@@ -0,0 +1,12 @@
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.services.orders.validation.workflows.pacbio_long_read.constants import PacbioDeliveryType
+from cg.services.orders.validation.workflows.pacbio_long_read.models.sample import PacbioSample
+
+
+class PacbioOrder(OrderWithSamples):
+ delivery_type: PacbioDeliveryType
+ samples: list[PacbioSample]
+
+ @property
+ def enumerated_samples(self) -> enumerate[PacbioSample]:
+ return enumerate(self.samples)
diff --git a/cg/services/orders/validation/workflows/pacbio_long_read/models/sample.py b/cg/services/orders/validation/workflows/pacbio_long_read/models/sample.py
new file mode 100644
index 0000000000..a147315f88
--- /dev/null
+++ b/cg/services/orders/validation/workflows/pacbio_long_read/models/sample.py
@@ -0,0 +1,15 @@
+from pydantic import Field
+
+from cg.models.orders.sample_base import NAME_PATTERN, PriorityEnum, SexEnum
+from cg.services.orders.validation.models.sample import Sample
+
+
+class PacbioSample(Sample):
+ concentration_ng_ul: float | None = None
+ priority: PriorityEnum
+ quantity: int | None = None
+ require_qc_ok: bool = False
+ sex: SexEnum
+ source: str
+    subject_id: str | None = Field(None, pattern=NAME_PATTERN, max_length=128)
+ tumour: bool = False
diff --git a/cg/services/orders/validation/workflows/pacbio_long_read/validation_rules.py b/cg/services/orders/validation/workflows/pacbio_long_read/validation_rules.py
new file mode 100644
index 0000000000..264a228aef
--- /dev/null
+++ b/cg/services/orders/validation/workflows/pacbio_long_read/validation_rules.py
@@ -0,0 +1,27 @@
+from typing import Callable
+
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+PACBIO_LONG_READ_SAMPLE_RULES: list[Callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_required,
+ validate_wells_contain_at_most_one_sample,
+ validate_well_position_format,
+ validate_well_positions_required,
+]
diff --git a/cg/services/orders/validation/workflows/rml/constants.py b/cg/services/orders/validation/workflows/rml/constants.py
new file mode 100644
index 0000000000..316cd64e96
--- /dev/null
+++ b/cg/services/orders/validation/workflows/rml/constants.py
@@ -0,0 +1,8 @@
+from enum import StrEnum
+
+from cg.constants.constants import DataDelivery
+
+
+class RmlDeliveryType(StrEnum):
+ FASTQ = DataDelivery.FASTQ
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/rml/models/order.py b/cg/services/orders/validation/workflows/rml/models/order.py
new file mode 100644
index 0000000000..96a14dbf43
--- /dev/null
+++ b/cg/services/orders/validation/workflows/rml/models/order.py
@@ -0,0 +1,34 @@
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.services.orders.validation.workflows.rml.constants import RmlDeliveryType
+from cg.services.orders.validation.workflows.rml.models.sample import RmlSample
+
+
+class RmlOrder(OrderWithSamples):
+ delivery_type: RmlDeliveryType
+ samples: list[RmlSample]
+
+ @property
+ def enumerated_samples(self) -> enumerate[RmlSample]:
+ return enumerate(self.samples)
+
+ @property
+ def pools(self) -> dict[str, list[RmlSample]]:
+ """Return a dictionary matching pool names and their respective samples."""
+ pools: dict[str, list[RmlSample]] = {}
+ for sample in self.samples:
+ if sample.pool not in pools:
+ pools[sample.pool] = [sample]
+ else:
+ pools[sample.pool].append(sample)
+ return pools
+
+ @property
+ def enumerated_pools(self) -> dict[str, list[tuple[int, RmlSample]]]:
+ """Return the pool dictionary with indexes for the samples to map them to validation errors."""
+ pools: dict[str, list[tuple[int, RmlSample]]] = {}
+ for sample_index, sample in self.enumerated_samples:
+ if sample.pool not in pools:
+ pools[sample.pool] = [(sample_index, sample)]
+ else:
+ pools[sample.pool].append((sample_index, sample))
+ return pools
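Both properties group the flat sample list by pool name; `enumerated_pools` additionally keeps each sample's position so pool-level validation errors can point back at individual rows. The grouping in isolation, with hypothetical pool and sample names:

    samples = [("pool1", "s1"), ("pool1", "s2"), ("pool2", "s3")]

    pools: dict[str, list[tuple[int, str]]] = {}
    for index, (pool, name) in enumerate(samples):
        # setdefault collapses the if/else branching used in the property above.
        pools.setdefault(pool, []).append((index, name))

    assert pools == {"pool1": [(0, "s1"), (1, "s2")], "pool2": [(2, "s3")]}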
diff --git a/cg/services/orders/validation/workflows/rml/models/sample.py b/cg/services/orders/validation/workflows/rml/models/sample.py
new file mode 100644
index 0000000000..9b0676b502
--- /dev/null
+++ b/cg/services/orders/validation/workflows/rml/models/sample.py
@@ -0,0 +1,39 @@
+import logging
+
+from pydantic import BeforeValidator, Field, model_validator
+from typing_extensions import Annotated
+
+from cg.models.orders.sample_base import ContainerEnum, ControlEnum, PriorityEnum
+from cg.services.orders.validation.constants import IndexEnum
+from cg.services.orders.validation.index_sequences import INDEX_SEQUENCES
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_control
+
+LOG = logging.getLogger(__name__)
+
+
+class RmlSample(Sample):
+ concentration: float
+ concentration_sample: float | None = None
+ container: ContainerEnum | None = Field(default=None, exclude=True)
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ index: IndexEnum
+ index_number: int | None = None
+ index_sequence: str | None = None
+ pool: str
+ priority: PriorityEnum
+ rml_plate_name: str | None = None
+ volume: int
+ well_position_rml: str | None = None
+
+ @model_validator(mode="after")
+ def set_default_index_sequence(self) -> "RmlSample":
+ """Set a default index_sequence from the index and index_number."""
+ if not self.index_sequence and (self.index and self.index_number):
+ try:
+ self.index_sequence = INDEX_SEQUENCES[self.index][self.index_number - 1]
+            except (KeyError, IndexError):
+ LOG.warning(
+ f"No index sequence set and no suitable sequence found for index {self.index}, number {self.index_number}"
+ )
+ return self
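Unlike the `mode="before"` hooks elsewhere in this diff, this validator runs after field validation, so it can read the already-coerced `index` and `index_number` attributes. A sketch of the same lookup-with-fallback, using a toy table in place of INDEX_SEQUENCES:

    from pydantic import BaseModel, model_validator

    # Hypothetical stand-in for INDEX_SEQUENCES[index][index_number - 1].
    SEQUENCES: dict[str, list[str]] = {"IDT xGen UDI": ["ACGT", "TGCA"]}

    class PooledSample(BaseModel):
        index: str
        index_number: int | None = None
        index_sequence: str | None = None

        @model_validator(mode="after")
        def default_sequence(self) -> "PooledSample":
            if not self.index_sequence and self.index_number:
                # index_number is 1-based on the order form, hence the - 1.
                self.index_sequence = SEQUENCES[self.index][self.index_number - 1]
            return self

    assert PooledSample(index="IDT xGen UDI", index_number=2).index_sequence == "TGCA"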
diff --git a/cg/services/orders/validation/workflows/rml/validation_rules.py b/cg/services/orders/validation/workflows/rml/validation_rules.py
new file mode 100644
index 0000000000..e229cf2da7
--- /dev/null
+++ b/cg/services/orders/validation/workflows/rml/validation_rules.py
@@ -0,0 +1,35 @@
+from typing import Callable
+
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_index_number_in_range,
+ validate_index_number_required,
+ validate_index_sequence_mismatch,
+ validate_index_sequence_required,
+ validate_pools_contain_one_application,
+ validate_pools_contain_one_priority,
+ validate_sample_names_available,
+ validate_sample_names_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_rml_format,
+ validate_well_positions_required_rml,
+)
+
+RML_SAMPLE_RULES: list[Callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_index_number_required,
+ validate_index_number_in_range,
+ validate_index_sequence_required,
+ validate_index_sequence_mismatch,
+ validate_pools_contain_one_application,
+ validate_pools_contain_one_priority,
+ validate_sample_names_available,
+ validate_sample_names_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_rml_format,
+ validate_well_positions_required_rml,
+]
diff --git a/cg/services/orders/validation/workflows/rna_fusion/constants.py b/cg/services/orders/validation/workflows/rna_fusion/constants.py
new file mode 100644
index 0000000000..e7b6225186
--- /dev/null
+++ b/cg/services/orders/validation/workflows/rna_fusion/constants.py
@@ -0,0 +1,14 @@
+from enum import StrEnum
+
+from cg.constants.constants import DataDelivery
+
+
+class RnaFusionDeliveryType(StrEnum):
+ ANALYSIS_FILES = DataDelivery.ANALYSIS_FILES
+ ANALYSIS_SCOUT = DataDelivery.ANALYSIS_SCOUT
+ SCOUT = DataDelivery.SCOUT
+ FASTQ = DataDelivery.FASTQ
+ FASTQ_ANALYSIS = DataDelivery.FASTQ_ANALYSIS
+ FASTQ_SCOUT = DataDelivery.FASTQ_SCOUT
+ FASTQ_ANALYSIS_SCOUT = DataDelivery.FASTQ_ANALYSIS_SCOUT
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/rna_fusion/models/case.py b/cg/services/orders/validation/workflows/rna_fusion/models/case.py
new file mode 100644
index 0000000000..d750c4a24a
--- /dev/null
+++ b/cg/services/orders/validation/workflows/rna_fusion/models/case.py
@@ -0,0 +1,16 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.case import Case
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.workflows.rna_fusion.models.sample import RnaFusionSample
+
+NewSample = Annotated[RnaFusionSample, Tag("new")]
+OldSample = Annotated[ExistingSample, Tag("existing")]
+
+
+class RnaFusionCase(Case):
+ cohorts: list[str] | None = None
+ synopsis: str | None = None
+ samples: list[Annotated[NewSample | OldSample, Discriminator(has_internal_id)]]
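The `Tag`/`Discriminator` pair implements a callable-discriminated union: `has_internal_id` (defined in cg.services.orders.validation.models.discriminators, not shown in this hunk) presumably returns "existing" when the payload carries an internal id and "new" otherwise. The mechanism, self-contained with toy models:

    from typing import Annotated, Any, Union

    from pydantic import BaseModel, Discriminator, Tag, TypeAdapter

    def by_internal_id(value: Any) -> str:
        # Tag selection runs on the raw payload before validation.
        if isinstance(value, dict):
            return "existing" if value.get("internal_id") else "new"
        return "existing" if getattr(value, "internal_id", None) else "new"

    class NewThing(BaseModel):
        name: str

    class ExistingThing(BaseModel):
        internal_id: str

    ThingUnion = Annotated[
        Union[Annotated[NewThing, Tag("new")], Annotated[ExistingThing, Tag("existing")]],
        Discriminator(by_internal_id),
    ]

    adapter = TypeAdapter(ThingUnion)
    assert isinstance(adapter.validate_python({"internal_id": "ACC123"}), ExistingThing)
    assert isinstance(adapter.validate_python({"name": "sample-1"}), NewThing)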
diff --git a/cg/services/orders/validation/workflows/rna_fusion/models/order.py b/cg/services/orders/validation/workflows/rna_fusion/models/order.py
new file mode 100644
index 0000000000..2fe61b6e92
--- /dev/null
+++ b/cg/services/orders/validation/workflows/rna_fusion/models/order.py
@@ -0,0 +1,16 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_case import ExistingCase
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.workflows.rna_fusion.constants import RnaFusionDeliveryType
+from cg.services.orders.validation.workflows.rna_fusion.models.case import RnaFusionCase
+
+NewCase = Annotated[RnaFusionCase, Tag("new")]
+OldCase = Annotated[ExistingCase, Tag("existing")]
+
+
+class RnaFusionOrder(OrderWithCases):
+ cases: list[Annotated[NewCase | OldCase, Discriminator(has_internal_id)]]
+ delivery_type: RnaFusionDeliveryType
diff --git a/cg/services/orders/validation/workflows/rna_fusion/models/sample.py b/cg/services/orders/validation/workflows/rna_fusion/models/sample.py
new file mode 100644
index 0000000000..fb08130ae7
--- /dev/null
+++ b/cg/services/orders/validation/workflows/rna_fusion/models/sample.py
@@ -0,0 +1,23 @@
+from pydantic import BeforeValidator, Field
+from typing_extensions import Annotated
+
+from cg.models.orders.sample_base import NAME_PATTERN, ControlEnum, SexEnum
+from cg.services.orders.validation.constants import ElutionBuffer, TissueBlockEnum
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_buffer, parse_control
+
+
+class RnaFusionSample(Sample):
+ age_at_sampling: float | None = None
+ concentration_ng_ul: float | None = None
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ elution_buffer: Annotated[ElutionBuffer | None, BeforeValidator(parse_buffer)] = None
+ formalin_fixation_time: int | None = None
+ phenotype_groups: list[str] | None = None
+ phenotype_terms: list[str] | None = None
+ post_formalin_fixation_time: int | None = None
+ require_qc_ok: bool = False
+ sex: SexEnum
+ source: str
+ subject_id: str = Field(pattern=NAME_PATTERN, min_length=1, max_length=128)
+ tissue_block_size: TissueBlockEnum | None = None
diff --git a/cg/services/orders/validation/workflows/rna_fusion/validation_rules.py b/cg/services/orders/validation/workflows/rna_fusion/validation_rules.py
new file mode 100644
index 0000000000..9a23c6377a
--- /dev/null
+++ b/cg/services/orders/validation/workflows/rna_fusion/validation_rules.py
@@ -0,0 +1,62 @@
+from typing import Callable
+
+from cg.services.orders.validation.rules.case.rules import (
+ validate_case_internal_ids_exist,
+ validate_case_names_available,
+ validate_case_names_not_repeated,
+ validate_existing_cases_belong_to_collaboration,
+ validate_one_sample_per_case,
+)
+from cg.services.orders.validation.rules.case_sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_application_not_archived,
+ validate_buffer_required,
+ validate_buffer_skip_rc_condition,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_existing_samples_belong_to_collaboration,
+ validate_sample_names_different_from_case_names,
+ validate_sample_names_not_repeated,
+ validate_samples_exist,
+ validate_subject_ids_different_from_case_names,
+ validate_subject_ids_different_from_sample_names,
+ validate_subject_sex_consistency,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+RNAFUSION_CASE_RULES: list[Callable] = [
+ validate_case_internal_ids_exist,
+ validate_case_names_available,
+ validate_case_names_not_repeated,
+ validate_existing_cases_belong_to_collaboration,
+ validate_one_sample_per_case,
+]
+
+RNAFUSION_CASE_SAMPLE_RULES: list[Callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_application_not_archived,
+ validate_buffer_required,
+ validate_buffer_skip_rc_condition,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_existing_samples_belong_to_collaboration,
+ validate_samples_exist,
+ validate_sample_names_different_from_case_names,
+ validate_sample_names_not_repeated,
+ validate_subject_ids_different_from_case_names,
+ validate_subject_ids_different_from_sample_names,
+ validate_subject_sex_consistency,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+]
diff --git a/cg/services/orders/validation/workflows/taxprofiler/constants.py b/cg/services/orders/validation/workflows/taxprofiler/constants.py
new file mode 100644
index 0000000000..c10362c704
--- /dev/null
+++ b/cg/services/orders/validation/workflows/taxprofiler/constants.py
@@ -0,0 +1,9 @@
+from enum import StrEnum
+
+from cg.constants import DataDelivery
+
+
+class TaxprofilerDeliveryType(StrEnum):
+ ANALYSIS = DataDelivery.ANALYSIS_FILES
+ FASTQ_ANALYSIS = DataDelivery.FASTQ_ANALYSIS
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/taxprofiler/models/order.py b/cg/services/orders/validation/workflows/taxprofiler/models/order.py
new file mode 100644
index 0000000000..26a425c647
--- /dev/null
+++ b/cg/services/orders/validation/workflows/taxprofiler/models/order.py
@@ -0,0 +1,12 @@
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.services.orders.validation.workflows.taxprofiler.constants import TaxprofilerDeliveryType
+from cg.services.orders.validation.workflows.taxprofiler.models.sample import TaxprofilerSample
+
+
+class TaxprofilerOrder(OrderWithSamples):
+ delivery_type: TaxprofilerDeliveryType
+ samples: list[TaxprofilerSample]
+
+ @property
+ def enumerated_samples(self) -> enumerate[TaxprofilerSample]:
+ return enumerate(self.samples)
diff --git a/cg/services/orders/validation/workflows/taxprofiler/models/sample.py b/cg/services/orders/validation/workflows/taxprofiler/models/sample.py
new file mode 100644
index 0000000000..dc281c2d85
--- /dev/null
+++ b/cg/services/orders/validation/workflows/taxprofiler/models/sample.py
@@ -0,0 +1,17 @@
+from pydantic import BeforeValidator
+from typing_extensions import Annotated
+
+from cg.models.orders.sample_base import ControlEnum, PriorityEnum
+from cg.services.orders.validation.constants import ElutionBuffer
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_buffer, parse_control
+
+
+class TaxprofilerSample(Sample):
+ concentration_ng_ul: float | None = None
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ elution_buffer: Annotated[ElutionBuffer, BeforeValidator(parse_buffer)]
+ priority: PriorityEnum
+ quantity: int | None = None
+ require_qc_ok: bool = False
+ source: str
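`parse_control` and `parse_buffer` are attached as `BeforeValidator` hooks, so free-text order-form values are normalized before the enum types validate them. A sketch of that normalization step, with a hypothetical parser in place of cg's:

    from enum import StrEnum
    from typing import Annotated, Any

    from pydantic import BaseModel, BeforeValidator

    class Control(StrEnum):
        NEGATIVE = "negative"
        NOT_CONTROL = ""

    def parse_control_like(value: Any) -> Any:
        # Hypothetical normalization: blank-ish form input means "not a control".
        return Control.NOT_CONTROL if value in (None, "", "no") else value

    class FormSample(BaseModel):
        control: Annotated[Control, BeforeValidator(parse_control_like)] = Control.NOT_CONTROL

    assert FormSample(control=None).control is Control.NOT_CONTROL
    assert FormSample(control="negative").control is Control.NEGATIVE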
diff --git a/cg/services/orders/validation/workflows/taxprofiler/validation_rules.py b/cg/services/orders/validation/workflows/taxprofiler/validation_rules.py
new file mode 100644
index 0000000000..790bdb8568
--- /dev/null
+++ b/cg/services/orders/validation/workflows/taxprofiler/validation_rules.py
@@ -0,0 +1,29 @@
+from typing import Callable
+
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_non_control_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+TAXPROFILER_SAMPLE_RULES: list[Callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_container_name_required,
+ validate_non_control_sample_names_available,
+ validate_sample_names_unique,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+]
diff --git a/cg/services/orders/validation/workflows/tomte/constants.py b/cg/services/orders/validation/workflows/tomte/constants.py
new file mode 100644
index 0000000000..597536e16e
--- /dev/null
+++ b/cg/services/orders/validation/workflows/tomte/constants.py
@@ -0,0 +1,10 @@
+from enum import StrEnum
+
+from cg.constants.constants import DataDelivery
+
+
+class TomteDeliveryType(StrEnum):
+ ANALYSIS_FILES = DataDelivery.ANALYSIS_FILES
+ FASTQ = DataDelivery.FASTQ
+ FASTQ_ANALYSIS = DataDelivery.FASTQ_ANALYSIS
+ NO_DELIVERY = DataDelivery.NO_DELIVERY
diff --git a/cg/services/orders/validation/workflows/tomte/models/case.py b/cg/services/orders/validation/workflows/tomte/models/case.py
new file mode 100644
index 0000000000..ab4504e4ea
--- /dev/null
+++ b/cg/services/orders/validation/workflows/tomte/models/case.py
@@ -0,0 +1,23 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.case import Case
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.workflows.tomte.models.sample import TomteSample
+
+NewSample = Annotated[TomteSample, Tag("new")]
+OldSample = Annotated[ExistingSample, Tag("existing")]
+
+
+class TomteCase(Case):
+ cohorts: list[str] | None = None
+ panels: list[str]
+ synopsis: str | None = None
+ samples: list[Annotated[NewSample | OldSample, Discriminator(has_internal_id)]]
+
+ def get_samples_with_father(self) -> list[tuple[TomteSample, int]]:
+ return [(sample, index) for index, sample in self.enumerated_samples if sample.father]
+
+ def get_samples_with_mother(self) -> list[tuple[TomteSample, int]]:
+ return [(sample, index) for index, sample in self.enumerated_samples if sample.mother]
diff --git a/cg/services/orders/validation/workflows/tomte/models/order.py b/cg/services/orders/validation/workflows/tomte/models/order.py
new file mode 100644
index 0000000000..e046b27e74
--- /dev/null
+++ b/cg/services/orders/validation/workflows/tomte/models/order.py
@@ -0,0 +1,16 @@
+from pydantic import Discriminator, Tag
+from typing_extensions import Annotated
+
+from cg.services.orders.validation.models.discriminators import has_internal_id
+from cg.services.orders.validation.models.existing_case import ExistingCase
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.workflows.tomte.constants import TomteDeliveryType
+from cg.services.orders.validation.workflows.tomte.models.case import TomteCase
+
+NewCase = Annotated[TomteCase, Tag("new")]
+OldCase = Annotated[ExistingCase, Tag("existing")]
+
+
+class TomteOrder(OrderWithCases):
+ cases: list[Annotated[NewCase | OldCase, Discriminator(has_internal_id)]]
+ delivery_type: TomteDeliveryType
diff --git a/cg/services/orders/validation/workflows/tomte/models/sample.py b/cg/services/orders/validation/workflows/tomte/models/sample.py
new file mode 100644
index 0000000000..b70941cf95
--- /dev/null
+++ b/cg/services/orders/validation/workflows/tomte/models/sample.py
@@ -0,0 +1,28 @@
+from pydantic import BeforeValidator, Field
+from typing_extensions import Annotated
+
+from cg.constants.constants import GenomeVersion
+from cg.models.orders.sample_base import NAME_PATTERN, ControlEnum, SexEnum, StatusEnum
+from cg.services.orders.validation.constants import ElutionBuffer, TissueBlockEnum
+from cg.services.orders.validation.models.sample import Sample
+from cg.services.orders.validation.utils import parse_buffer, parse_control
+
+
+class TomteSample(Sample):
+ age_at_sampling: float | None = None
+ control: Annotated[ControlEnum, BeforeValidator(parse_control)] = ControlEnum.not_control
+ elution_buffer: Annotated[ElutionBuffer | None, BeforeValidator(parse_buffer)] = None
+ father: str | None = Field(None, pattern=NAME_PATTERN)
+ formalin_fixation_time: int | None = None
+ mother: str | None = Field(None, pattern=NAME_PATTERN)
+ phenotype_groups: list[str] | None = None
+ phenotype_terms: list[str] | None = None
+ post_formalin_fixation_time: int | None = None
+ reference_genome: GenomeVersion
+ require_qc_ok: bool = False
+ sex: SexEnum
+ source: str
+ status: StatusEnum
+ subject_id: str = Field(pattern=NAME_PATTERN, min_length=1, max_length=128)
+ tissue_block_size: TissueBlockEnum | None = None
+ concentration_ng_ul: float | None = None
diff --git a/cg/services/orders/validation/workflows/tomte/validation_rules.py b/cg/services/orders/validation/workflows/tomte/validation_rules.py
new file mode 100644
index 0000000000..a6e46ed1dc
--- /dev/null
+++ b/cg/services/orders/validation/workflows/tomte/validation_rules.py
@@ -0,0 +1,74 @@
+from typing import Callable
+
+from cg.services.orders.validation.rules.case.rules import (
+ validate_case_internal_ids_exist,
+ validate_case_names_available,
+ validate_case_names_not_repeated,
+ validate_existing_cases_belong_to_collaboration,
+ validate_gene_panels_unique,
+)
+from cg.services.orders.validation.rules.case_sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_application_not_archived,
+ validate_buffer_required,
+ validate_buffer_skip_rc_condition,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_existing_samples_belong_to_collaboration,
+ validate_fathers_are_male,
+ validate_fathers_in_same_case_as_children,
+ validate_gene_panels_exist,
+ validate_mothers_are_female,
+ validate_mothers_in_same_case_as_children,
+ validate_pedigree,
+ validate_sample_names_different_from_case_names,
+ validate_sample_names_not_repeated,
+ validate_samples_exist,
+ validate_subject_ids_different_from_case_names,
+ validate_subject_ids_different_from_sample_names,
+ validate_subject_sex_consistency,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+
+TOMTE_CASE_RULES: list[Callable] = [
+ validate_case_internal_ids_exist,
+ validate_case_names_available,
+ validate_case_names_not_repeated,
+ validate_existing_cases_belong_to_collaboration,
+ validate_gene_panels_exist,
+ validate_gene_panels_unique,
+]
+
+TOMTE_CASE_SAMPLE_RULES: list[Callable] = [
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_application_not_archived,
+ validate_buffer_required,
+ validate_buffer_skip_rc_condition,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_existing_samples_belong_to_collaboration,
+ validate_fathers_are_male,
+ validate_fathers_in_same_case_as_children,
+ validate_mothers_are_female,
+ validate_mothers_in_same_case_as_children,
+ validate_pedigree,
+ validate_samples_exist,
+ validate_sample_names_different_from_case_names,
+ validate_sample_names_not_repeated,
+ validate_subject_ids_different_from_case_names,
+ validate_subject_ids_different_from_sample_names,
+ validate_subject_sex_consistency,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+]
diff --git a/cg/services/sequencing_qc_service/quality_checks/checks.py b/cg/services/sequencing_qc_service/quality_checks/checks.py
index 8a1a3ea4d3..e28433e444 100644
--- a/cg/services/sequencing_qc_service/quality_checks/checks.py
+++ b/cg/services/sequencing_qc_service/quality_checks/checks.py
@@ -33,6 +33,7 @@ def get_sequencing_quality_check_for_case(case: Case) -> Callable:
Workflow.BALSAMIC_UMI,
Workflow.MIP_DNA,
Workflow.MIP_RNA,
+ Workflow.NALLO,
Workflow.RAREDISEASE,
Workflow.RNAFUSION,
Workflow.TOMTE,
diff --git a/cg/store/crud/create.py b/cg/store/crud/create.py
index 40b0f72c83..418c901eac 100644
--- a/cg/store/crud/create.py
+++ b/cg/store/crud/create.py
@@ -8,7 +8,6 @@
from cg.constants import DataDelivery, Priority, Workflow
from cg.constants.archiving import PDC_ARCHIVE_LOCATION
from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
from cg.services.illumina.data_transfer.models import (
IlluminaFlowCellDTO,
IlluminaSampleSequencingMetricsDTO,
@@ -246,11 +245,13 @@ def add_case(
priority: Priority | None = Priority.standard,
synopsis: str | None = None,
customer_id: int | None = None,
+ comment: str | None = None,
) -> Case:
"""Build a new Case record."""
internal_id: str = self.generate_readable_case_id()
return Case(
+ comment=comment,
cohorts=cohorts,
data_analysis=str(data_analysis),
data_delivery=str(data_delivery),
@@ -398,17 +399,14 @@ def add_organism(
**kwargs,
)
- def add_order(self, order_data: OrderIn):
- customer: Customer = self.get_customer_by_internal_id(order_data.customer)
- workflow: str = order_data.samples[0].data_analysis
+ def add_order(self, customer: Customer, ticket_id: int, **kwargs) -> Order:
+ """Build a new Order record."""
order = Order(
- customer_id=customer.id,
- ticket_id=order_data.ticket,
- workflow=workflow,
+ customer=customer,
+ order_date=datetime.now(),
+ ticket_id=ticket_id,
+ **kwargs,
)
- session: Session = get_session()
- session.add(order)
- session.commit()
return order
@staticmethod
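With this change `add_order` no longer touches the session; it only builds the record from an already-fetched customer. A presumed call site, using the session helpers this diff adds to cg/store/store.py (identifiers are from this diff, the values are hypothetical):

    customer = store.get_customer_by_internal_id("cust000")
    order = store.add_order(customer=customer, ticket_id=123456)
    store.add_item_to_store(order)
    store.commit_to_store()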
diff --git a/cg/store/crud/read.py b/cg/store/crud/read.py
index d82209443d..df2c29f369 100644
--- a/cg/store/crud/read.py
+++ b/cg/store/crud/read.py
@@ -8,10 +8,16 @@
from sqlalchemy.orm import Query, Session
from cg.constants import SequencingRunDataAvailability, Workflow
-from cg.constants.constants import CaseActions, CustomerId, SampleType
-from cg.constants.sequencing import SeqLibraryPrepCategory
+from cg.constants.constants import (
+ DNA_WORKFLOWS_WITH_SCOUT_UPLOAD,
+ CaseActions,
+ CustomerId,
+ SampleType,
+)
+from cg.constants.sequencing import DNA_PREP_CATEGORIES, SeqLibraryPrepCategory
from cg.exc import CaseNotFoundError, CgError, OrderNotFoundError, SampleNotFoundError
from cg.models.orders.constants import OrderType
+from cg.models.orders.sample_base import SexEnum
from cg.server.dto.samples.collaborator_samples_request import CollaboratorSamplesRequest
from cg.services.orders.order_service.models import OrderQueryParams
from cg.store.base import BaseHandler
@@ -952,7 +958,7 @@ def get_organism_by_internal_id(self, internal_id: str) -> Organism:
internal_id=internal_id,
).first()
- def get_all_organisms(self) -> list[Organism]:
+ def get_all_organisms(self) -> Query[Organism]:
"""Return all organisms ordered by organism internal id."""
return self._get_query(table=Organism).order_by(Organism.internal_id)
@@ -972,7 +978,7 @@ def get_panels(self) -> list[Panel]:
"""Returns all panels."""
return self._get_query(table=Panel).order_by(Panel.abbrev).all()
- def get_user_by_email(self, email: str) -> User:
+ def get_user_by_email(self, email: str) -> User | None:
"""Return a user by email from the database."""
return apply_user_filter(
users=self._get_query(table=User),
@@ -980,6 +986,23 @@ def get_user_by_email(self, email: str) -> User:
filter_functions=[UserFilter.BY_EMAIL],
).first()
+ def is_user_associated_with_customer(self, user_id: int, customer_internal_id: str) -> bool:
+ user: User | None = apply_user_filter(
+ users=self._get_query(table=User),
+ user_id=user_id,
+ customer_internal_id=customer_internal_id,
+ filter_functions=[UserFilter.BY_ID, UserFilter.BY_CUSTOMER_INTERNAL_ID],
+ ).first()
+ return bool(user)
+
+ def is_customer_trusted(self, customer_internal_id: str) -> bool:
+ customer: Customer | None = self.get_customer_by_internal_id(customer_internal_id)
+ return bool(customer and customer.is_trusted)
+
+ def customer_exists(self, customer_internal_id: str) -> bool:
+ customer: Customer | None = self.get_customer_by_internal_id(customer_internal_id)
+ return bool(customer)
+
def get_samples_to_receive(self, external: bool = False) -> list[Sample]:
"""Return samples to receive."""
records: Query = self._get_join_sample_application_version_query()
@@ -1428,7 +1451,8 @@ def get_orders(self, orders_params: OrderQueryParams) -> tuple[list[Order], int]
cases=orders,
filter_functions=[CaseFilter.BY_WORKFLOWS],
workflows=orders_params.workflows,
- ).distinct()
+ )
+ orders = orders.distinct()
orders: Query = apply_order_filters(
orders=orders,
filters=[OrderFilter.BY_SEARCH, OrderFilter.BY_OPEN],
@@ -1573,6 +1597,13 @@ def get_cases_for_sequencing_qc(self) -> list[Case]:
],
).all()
+ def is_application_archived(self, application_tag: str) -> bool:
+ application: Application | None = self.get_application_by_tag(application_tag)
+        return bool(application and application.is_archived)
+
+ def does_gene_panel_exist(self, abbreviation: str) -> bool:
+ return bool(self.get_panel_by_abbreviation(abbreviation))
+
def get_pac_bio_smrt_cell_by_internal_id(self, internal_id: str) -> PacbioSMRTCell:
return apply_pac_bio_smrt_cell_filters(
filter_functions=[PacBioSMRTCellFilter.BY_INTERNAL_ID],
@@ -1591,14 +1622,31 @@ def get_case_ids_for_samples(self, sample_ids: list[int]) -> list[str]:
case_ids.extend(self.get_case_ids_with_sample(sample_id))
return list(set(case_ids))
- def get_related_samples(
+ def sample_exists_with_different_sex(
self,
- sample_internal_id: str,
+ customer_internal_id: str,
+ subject_id: str,
+ sex: SexEnum,
+ ) -> bool:
+ samples: list[Sample] = self.get_samples_by_customer_and_subject_id(
+ customer_internal_id=customer_internal_id,
+ subject_id=subject_id,
+ )
+ for sample in samples:
+ if sample.sex == SexEnum.unknown:
+ continue
+ if sample.sex != sex:
+ return True
+ return False
+
+ def _get_related_samples_query(
+ self,
+ sample: Sample,
prep_categories: list[SeqLibraryPrepCategory],
collaborators: set[Customer],
- ) -> list[Sample]:
- """Returns a list of samples with the same subject_id, tumour status and within the collaborators of a given sample and within the given list of prep categories."""
- sample: Sample = self.get_sample_by_internal_id(internal_id=sample_internal_id)
+ ) -> Query:
+ """Returns a sample query with the same subject_id, tumour status and within the collaborators of the given
+ sample and within the given list of prep categories."""
sample_application_version_query: Query = self._get_join_sample_application_version_query()
@@ -1608,7 +1656,7 @@ def get_related_samples(
filter_functions=[ApplicationFilter.BY_PREP_CATEGORIES],
)
- sample_application_version_query: Query = apply_sample_filter(
+ samples: Query = apply_sample_filter(
samples=sample_application_version_query,
subject_id=sample.subject_id,
is_tumour=sample.is_tumour,
@@ -1619,27 +1667,48 @@ def get_related_samples(
SampleFilter.BY_CUSTOMER_ENTRY_IDS,
],
)
+ return samples
- return sample_application_version_query.all()
+ def get_uploaded_related_dna_cases(self, rna_case: Case) -> list[Case]:
+ """Returns all uploaded DNA cases ids related to the given RNA case."""
- def get_related_cases(
- self, sample_internal_id: str, workflows: list[Workflow], collaborators: set[Customer]
- ) -> list[Case]:
- """Return a list of cases linked to the given sample within the given list of workflows and customers in a collaboration."""
+ related_dna_cases: list[Case] = []
+ for rna_sample in rna_case.samples:
- cases_with_samples: Query = self._join_sample_and_case()
- cases_with_samples: Query = apply_case_sample_filter(
- case_samples=cases_with_samples,
- sample_internal_id=sample_internal_id,
- filter_functions=[CaseSampleFilter.CASES_WITH_SAMPLE_BY_INTERNAL_ID],
- )
+ collaborators: set[Customer] = rna_sample.customer.collaborators
- return apply_case_filter(
- cases=cases_with_samples,
- workflows=workflows,
- customer_entry_ids=[customer.id for customer in collaborators],
- filter_functions=[
- CaseFilter.BY_WORKFLOWS,
- CaseFilter.BY_CUSTOMER_ENTRY_IDS,
- ],
- ).all()
+ related_dna_samples_query: Query = self._get_related_samples_query(
+ sample=rna_sample,
+ prep_categories=DNA_PREP_CATEGORIES,
+ collaborators=collaborators,
+ )
+
+ dna_samples_cases_analysis_query: Query = (
+ related_dna_samples_query.join(Sample.links).join(CaseSample.case).join(Analysis)
+ )
+
+ dna_samples_cases_analysis_query: Query = apply_case_filter(
+ cases=dna_samples_cases_analysis_query,
+ workflows=DNA_WORKFLOWS_WITH_SCOUT_UPLOAD,
+ customer_entry_ids=[customer.id for customer in collaborators],
+ filter_functions=[
+ CaseFilter.BY_WORKFLOWS,
+ CaseFilter.BY_CUSTOMER_ENTRY_IDS,
+ ],
+ )
+
+ uploaded_dna_cases: list[Case] = (
+ apply_analysis_filter(
+ analyses=dna_samples_cases_analysis_query,
+ filter_functions=[AnalysisFilter.IS_UPLOADED],
+ )
+ .with_entities(Case)
+ .all()
+ )
+
+            related_dna_cases.extend(uploaded_dna_cases)
+ if not related_dna_cases:
+ raise CaseNotFoundError(
+ f"No matching uploaded DNA cases for case {rna_case.internal_id} ({rna_case.name})."
+ )
+ return related_dna_cases
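Note that the rewritten lookup raises rather than returning an empty list, so callers must treat "no uploaded DNA case found" as an exceptional path. A presumed call-site shape:

    from cg.exc import CaseNotFoundError

    try:
        dna_cases = store.get_uploaded_related_dna_cases(rna_case)
    except CaseNotFoundError:
        # No uploaded DNA case shares subject_id and tumour status with any
        # sample in the RNA case; skip the RNA-to-DNA linking step.
        dna_cases = []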
diff --git a/cg/store/filters/status_case_sample_filters.py b/cg/store/filters/status_case_sample_filters.py
index 508b98435d..b8f30cec25 100644
--- a/cg/store/filters/status_case_sample_filters.py
+++ b/cg/store/filters/status_case_sample_filters.py
@@ -87,6 +87,7 @@ def apply_case_sample_filter(
cases_to_exclude: list[str] = [],
sample_entry_id: int | None = None,
sample_internal_id: str | None = None,
+ sample_internal_ids: list[str] | None = None,
order_id: int | None = None,
) -> Query:
"""Apply filtering functions to the sample queries and return filtered results."""
@@ -98,6 +99,7 @@ def apply_case_sample_filter(
cases_to_exclude=cases_to_exclude,
sample_entry_id=sample_entry_id,
sample_internal_id=sample_internal_id,
+ sample_internal_ids=sample_internal_ids,
order_id=order_id,
)
return case_samples
diff --git a/cg/store/filters/status_sample_filters.py b/cg/store/filters/status_sample_filters.py
index 46fdfd57b7..cf9818c441 100644
--- a/cg/store/filters/status_sample_filters.py
+++ b/cg/store/filters/status_sample_filters.py
@@ -13,6 +13,11 @@ def filter_samples_by_internal_id(internal_id: str, samples: Query, **kwargs) ->
return samples.filter(Sample.internal_id == internal_id)
+def filter_samples_by_internal_ids(internal_ids: list[str], samples: Query, **kwargs) -> Query:
+ """Return sample by internal id."""
+ return samples.filter(Sample.internal_id.in_(internal_ids)) if internal_ids else samples
+
+
def filter_samples_by_name(name: str, samples: Query, **kwargs) -> Query:
"""Return sample with sample name."""
return samples.filter(Sample.name == name)
@@ -224,6 +229,7 @@ class SampleFilter(Enum):
BY_ENTRY_ID: Callable = filter_samples_by_entry_id
BY_IDENTIFIER_NAME_AND_VALUE: Callable = filter_samples_by_identifier_name_and_value
BY_INTERNAL_ID: Callable = filter_samples_by_internal_id
+ BY_INTERNAL_IDS: Callable = filter_samples_by_internal_ids
BY_INTERNAL_ID_OR_NAME_SEARCH: Callable = filter_samples_by_internal_id_or_name_search
BY_INTERNAL_ID_PATTERN: Callable = filter_samples_by_internal_id_pattern
BY_INVOICE_ID: Callable = filter_samples_by_invoice_id
diff --git a/cg/store/filters/status_user_filters.py b/cg/store/filters/status_user_filters.py
index 61111d6598..6f832f33d5 100644
--- a/cg/store/filters/status_user_filters.py
+++ b/cg/store/filters/status_user_filters.py
@@ -3,7 +3,7 @@
from sqlalchemy.orm import Query
-from cg.store.models import User
+from cg.store.models import Customer, User
def filter_user_by_email(users: Query, email: str, **kwargs) -> Query:
@@ -11,21 +11,35 @@ def filter_user_by_email(users: Query, email: str, **kwargs) -> Query:
return users.filter(User.email == email)
+def filter_user_by_id(users: Query, user_id: int, **kwargs) -> Query:
+ return users.filter(User.id == user_id)
+
+
+def filter_user_by_customer_internal_id(users: Query, customer_internal_id: str, **kwargs) -> Query:
+ return users.join(User.customers).filter(Customer.internal_id == customer_internal_id)
+
+
class UserFilter(Enum):
"""Define User filter functions."""
BY_EMAIL: Callable = filter_user_by_email
+ BY_ID: Callable = filter_user_by_id
+ BY_CUSTOMER_INTERNAL_ID: Callable = filter_user_by_customer_internal_id
def apply_user_filter(
users: Query,
filter_functions: list[Callable],
email: str | None = None,
+ user_id: int | None = None,
+ customer_internal_id: str | None = None,
) -> Query:
"""Apply filtering functions and return filtered results."""
for filter_function in filter_functions:
users: Query = filter_function(
users=users,
email=email,
+ user_id=user_id,
+ customer_internal_id=customer_internal_id,
)
return users
diff --git a/cg/store/models.py b/cg/store/models.py
index f2fb0fccc6..377a034e9d 100644
--- a/cg/store/models.py
+++ b/cg/store/models.py
@@ -815,6 +815,11 @@ class Sample(Base, PriorityMixin):
back_populates="sample", cascade="all, delete"
)
+    def __init__(self, **kwargs):
+        """Set only attributes that exist on the model; unknown keyword arguments are silently dropped."""
+        for key, value in kwargs.items():
+            if hasattr(self, key):
+                setattr(self, key, value)
+
def __str__(self) -> str:
return f"{self.internal_id} ({self.name})"
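This constructor replaces SQLAlchemy's default declarative `__init__`, which raises TypeError on unknown keyword arguments; here unknown keys are dropped instead. The behaviour in miniature, outside SQLAlchemy:

    class Permissive:
        name: str | None = None

        def __init__(self, **kwargs):
            for key, value in kwargs.items():
                # Only attributes already defined on the class are set.
                if hasattr(self, key):
                    setattr(self, key, value)

    obj = Permissive(name="ACC123", not_a_column="ignored")
    assert obj.name == "ACC123" and not hasattr(obj, "not_a_column")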
diff --git a/cg/store/store.py b/cg/store/store.py
index 505924f37f..dedfaf1dc8 100644
--- a/cg/store/store.py
+++ b/cg/store/store.py
@@ -28,10 +28,18 @@ def commit_to_store(self):
"""Commit pending changes to the store."""
self.session.commit()
+ def add_item_to_store(self, item: ModelBase):
+ """Add an item to the store."""
+ self.session.add(item)
+
def add_multiple_items_to_store(self, items: list[ModelBase]):
"""Add multiple items to the store."""
self.session.add_all(items)
+ def no_autoflush_context(self):
+ """Return a context manager that disables autoflush for the session."""
+ return self.session.no_autoflush
+
def rollback(self):
"""Rollback any pending change to the store."""
self.session.rollback()
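`no_autoflush_context` exposes SQLAlchemy's `Session.no_autoflush` property, which acts as a context manager: queries issued inside the block will not flush pending, possibly half-built objects first. Presumed usage, with a hypothetical sample id:

    with store.no_autoflush_context():
        # Query without flushing objects added but not yet committed.
        sample = store.get_sample_by_internal_id("ACC123")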
diff --git a/cg/utils/click/EnumChoice.py b/cg/utils/click/EnumChoice.py
index 0f77dd639f..5a3a64dd0c 100644
--- a/cg/utils/click/EnumChoice.py
+++ b/cg/utils/click/EnumChoice.py
@@ -4,7 +4,7 @@
from enum import EnumMeta
-import click
+import rich_click as click
class EnumChoice(click.Choice):
diff --git a/cg/utils/commands.py b/cg/utils/commands.py
index 25115d4f34..78f4c531ce 100644
--- a/cg/utils/commands.py
+++ b/cg/utils/commands.py
@@ -16,7 +16,7 @@ class Process:
"""Class to handle communication with other programs via the shell.
The other parts of the code should not need to have any knowledge about how the processes are
- called, that will be handled in this module.Output form stdout and stdin will be handled here.
+    called; that will be handled in this module. Output from stdout and stdin will be handled here.
"""
def __init__(
diff --git a/poetry.lock b/poetry.lock
index 7903d670bf..f315ea9994 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,14 +1,16 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.0.0 and should not be changed by hand.
[[package]]
name = "alembic"
-version = "1.12.0"
+version = "1.14.0"
description = "A database migration tool for SQLAlchemy."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "alembic-1.12.0-py3-none-any.whl", hash = "sha256:03226222f1cf943deee6c85d9464261a6c710cd19b4fe867a3ad1f25afda610f"},
- {file = "alembic-1.12.0.tar.gz", hash = "sha256:8e7645c32e4f200675e69f0745415335eb59a3663f5feb487abfa0b30c45888b"},
+ {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"},
+ {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"},
]
[package.dependencies]
@@ -17,47 +19,55 @@ SQLAlchemy = ">=1.3.0"
typing-extensions = ">=4"
[package.extras]
-tz = ["python-dateutil"]
+tz = ["backports.zoneinfo"]
[[package]]
name = "annotated-types"
-version = "0.6.0"
+version = "0.7.0"
description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"},
- {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"},
+ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
]
[[package]]
name = "bcrypt"
-version = "4.0.1"
+version = "4.2.1"
description = "Modern password hashing for your software and your servers"
optional = false
-python-versions = ">=3.6"
-files = [
- {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"},
- {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"},
- {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"},
- {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"},
- {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"},
- {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"},
- {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"},
- {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"},
- {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"},
- {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"},
- {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"},
- {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"},
- {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"},
- {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"},
- {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"},
+python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "bcrypt-4.2.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17"},
+ {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f"},
+ {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dbd0747208912b1e4ce730c6725cb56c07ac734b3629b60d4398f082ea718ad"},
+ {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:aaa2e285be097050dba798d537b6efd9b698aa88eef52ec98d23dcd6d7cf6fea"},
+ {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:76d3e352b32f4eeb34703370e370997065d28a561e4a18afe4fef07249cb4396"},
+ {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b7703ede632dc945ed1172d6f24e9f30f27b1b1a067f32f68bf169c5f08d0425"},
+ {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:89df2aea2c43be1e1fa066df5f86c8ce822ab70a30e4c210968669565c0f4685"},
+ {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:04e56e3fe8308a88b77e0afd20bec516f74aecf391cdd6e374f15cbed32783d6"},
+ {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cfdf3d7530c790432046c40cda41dfee8c83e29482e6a604f8930b9930e94139"},
+ {file = "bcrypt-4.2.1-cp37-abi3-win32.whl", hash = "sha256:adadd36274510a01f33e6dc08f5824b97c9580583bd4487c564fc4617b328005"},
+ {file = "bcrypt-4.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:8c458cd103e6c5d1d85cf600e546a639f234964d0228909d8f8dbeebff82d526"},
+ {file = "bcrypt-4.2.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8ad2f4528cbf0febe80e5a3a57d7a74e6635e41af1ea5675282a33d769fba413"},
+ {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909faa1027900f2252a9ca5dfebd25fc0ef1417943824783d1c8418dd7d6df4a"},
+ {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cde78d385d5e93ece5479a0a87f73cd6fa26b171c786a884f955e165032b262c"},
+ {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:533e7f3bcf2f07caee7ad98124fab7499cb3333ba2274f7a36cf1daee7409d99"},
+ {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:687cf30e6681eeda39548a93ce9bfbb300e48b4d445a43db4298d2474d2a1e54"},
+ {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:041fa0155c9004eb98a232d54da05c0b41d4b8e66b6fc3cb71b4b3f6144ba837"},
+ {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f85b1ffa09240c89aa2e1ae9f3b1c687104f7b2b9d2098da4e923f1b7082d331"},
+ {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c6f5fa3775966cca251848d4d5393ab016b3afed251163c1436fefdec3b02c84"},
+ {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:807261df60a8b1ccd13e6599c779014a362ae4e795f5c59747f60208daddd96d"},
+ {file = "bcrypt-4.2.1-cp39-abi3-win32.whl", hash = "sha256:b588af02b89d9fad33e5f98f7838bf590d6d692df7153647724a7f20c186f6bf"},
+ {file = "bcrypt-4.2.1-cp39-abi3-win_amd64.whl", hash = "sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c"},
+ {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76132c176a6d9953cdc83c296aeaed65e1a708485fd55abf163e0d9f8f16ce0e"},
+ {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e158009a54c4c8bc91d5e0da80920d048f918c61a581f0a63e4e93bb556d362f"},
+ {file = "bcrypt-4.2.1.tar.gz", hash = "sha256:6765386e3ab87f569b276988742039baab087b2cdb01e809d74e74503c2faafe"},
]
[package.extras]
@@ -66,116 +76,141 @@ typecheck = ["mypy"]
[[package]]
name = "blinker"
-version = "1.6.3"
+version = "1.9.0"
description = "Fast, simple object-to-object and broadcast signaling"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "blinker-1.6.3-py3-none-any.whl", hash = "sha256:296320d6c28b006eb5e32d4712202dbcdcbf5dc482da298c2f44881c43884aaa"},
- {file = "blinker-1.6.3.tar.gz", hash = "sha256:152090d27c1c5c722ee7e48504b02d76502811ce02e1523553b4cf8c8b3d3a8d"},
+ {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"},
+ {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"},
]
[[package]]
name = "cachecontrol"
-version = "0.13.1"
+version = "0.14.2"
description = "httplib2 caching for requests"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "cachecontrol-0.13.1-py3-none-any.whl", hash = "sha256:95dedbec849f46dda3137866dc28b9d133fc9af55f5b805ab1291833e4457aa4"},
- {file = "cachecontrol-0.13.1.tar.gz", hash = "sha256:f012366b79d2243a6118309ce73151bf52a38d4a5dac8ea57f09bd29087e506b"},
+ {file = "cachecontrol-0.14.2-py3-none-any.whl", hash = "sha256:ebad2091bf12d0d200dfc2464330db638c5deb41d546f6d7aca079e87290f3b0"},
+ {file = "cachecontrol-0.14.2.tar.gz", hash = "sha256:7d47d19f866409b98ff6025b6a0fca8e4c791fb31abbd95f622093894ce903a2"},
]
[package.dependencies]
-msgpack = ">=0.5.2"
+msgpack = ">=0.5.2,<2.0.0"
requests = ">=2.16.0"
[package.extras]
-dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "mypy", "pytest", "pytest-cov", "sphinx", "tox", "types-redis", "types-requests"]
+dev = ["CacheControl[filecache,redis]", "build", "cherrypy", "codespell[tomli]", "furo", "mypy", "pytest", "pytest-cov", "ruff", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"]
filecache = ["filelock (>=3.8.0)"]
redis = ["redis (>=2.10.5)"]
[[package]]
name = "cachetools"
-version = "5.3.1"
+version = "5.5.0"
description = "Extensible memoizing collections and decorators"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"},
- {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"},
+ {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"},
+ {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"},
]
[[package]]
name = "certifi"
-version = "2024.7.4"
+version = "2024.12.14"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
- {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
+ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"},
+ {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"},
]
[[package]]
name = "cffi"
-version = "1.16.0"
+version = "1.17.1"
description = "Foreign Function Interface for Python calling C code."
optional = false
python-versions = ">=3.8"
-files = [
- {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
- {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
- {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
- {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
- {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
- {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
- {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
- {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
- {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
- {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
- {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
- {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
- {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
- {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
- {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
- {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
- {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
- {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
- {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
- {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
- {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
- {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
- {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
- {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
- {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
- {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
- {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
+ {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
+ {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
+ {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
+ {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
+ {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
+ {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
+ {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
+ {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
+ {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
+ {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
+ {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
+ {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
+ {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
+ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
+ {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
]
[package.dependencies]
@@ -185,8 +220,10 @@ pycparser = "*"
name = "cfgv"
version = "3.4.0"
description = "Validate configuration and produce human readable error messages."
-optional = false
+optional = true
python-versions = ">=3.8"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pre-commit\""
files = [
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
@@ -194,112 +231,118 @@ files = [
[[package]]
name = "charset-normalizer"
-version = "3.3.1"
+version = "3.4.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
-python-versions = ">=3.7.0"
-files = [
- {file = "charset-normalizer-3.3.1.tar.gz", hash = "sha256:d9137a876020661972ca6eec0766d81aef8a5627df628b664b234b73396e727e"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8aee051c89e13565c6bd366813c386939f8e928af93c29fda4af86d25b73d8f8"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:352a88c3df0d1fa886562384b86f9a9e27563d4704ee0e9d56ec6fcd270ea690"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:223b4d54561c01048f657fa6ce41461d5ad8ff128b9678cfe8b2ecd951e3f8a2"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f861d94c2a450b974b86093c6c027888627b8082f1299dfd5a4bae8e2292821"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1171ef1fc5ab4693c5d151ae0fdad7f7349920eabbaca6271f95969fa0756c2d"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28f512b9a33235545fbbdac6a330a510b63be278a50071a336afc1b78781b147"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e842112fe3f1a4ffcf64b06dc4c61a88441c2f02f373367f7b4c1aa9be2ad5"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f9bc2ce123637a60ebe819f9fccc614da1bcc05798bbbaf2dd4ec91f3e08846"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f194cce575e59ffe442c10a360182a986535fd90b57f7debfaa5c845c409ecc3"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9a74041ba0bfa9bc9b9bb2cd3238a6ab3b7618e759b41bd15b5f6ad958d17605"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b578cbe580e3b41ad17b1c428f382c814b32a6ce90f2d8e39e2e635d49e498d1"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6db3cfb9b4fcecb4390db154e75b49578c87a3b9979b40cdf90d7e4b945656e1"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:debb633f3f7856f95ad957d9b9c781f8e2c6303ef21724ec94bea2ce2fcbd056"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-win32.whl", hash = "sha256:87071618d3d8ec8b186d53cb6e66955ef2a0e4fa63ccd3709c0c90ac5a43520f"},
- {file = "charset_normalizer-3.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:e372d7dfd154009142631de2d316adad3cc1c36c32a38b16a4751ba78da2a397"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae4070f741f8d809075ef697877fd350ecf0b7c5837ed68738607ee0a2c572cf"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58e875eb7016fd014c0eea46c6fa92b87b62c0cb31b9feae25cbbe62c919f54d"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbd95e300367aa0827496fe75a1766d198d34385a58f97683fe6e07f89ca3e3c"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de0b4caa1c8a21394e8ce971997614a17648f94e1cd0640fbd6b4d14cab13a72"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:985c7965f62f6f32bf432e2681173db41336a9c2611693247069288bcb0c7f8b"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a15c1fe6d26e83fd2e5972425a772cca158eae58b05d4a25a4e474c221053e2d"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae55d592b02c4349525b6ed8f74c692509e5adffa842e582c0f861751701a673"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be4d9c2770044a59715eb57c1144dedea7c5d5ae80c68fb9959515037cde2008"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:851cf693fb3aaef71031237cd68699dded198657ec1e76a76eb8be58c03a5d1f"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:31bbaba7218904d2eabecf4feec0d07469284e952a27400f23b6628439439fa7"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:871d045d6ccc181fd863a3cd66ee8e395523ebfbc57f85f91f035f50cee8e3d4"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:501adc5eb6cd5f40a6f77fbd90e5ab915c8fd6e8c614af2db5561e16c600d6f3"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f5fb672c396d826ca16a022ac04c9dce74e00a1c344f6ad1a0fdc1ba1f332213"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-win32.whl", hash = "sha256:bb06098d019766ca16fc915ecaa455c1f1cd594204e7f840cd6258237b5079a8"},
- {file = "charset_normalizer-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:8af5a8917b8af42295e86b64903156b4f110a30dca5f3b5aedea123fbd638bff"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae8e5142dcc7a49168f4055255dbcced01dc1714a90a21f87448dc8d90617d1"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5b70bab78accbc672f50e878a5b73ca692f45f5b5e25c8066d748c09405e6a55"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ceca5876032362ae73b83347be8b5dbd2d1faf3358deb38c9c88776779b2e2f"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d95638ff3613849f473afc33f65c401a89f3b9528d0d213c7037c398a51296"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9edbe6a5bf8b56a4a84533ba2b2f489d0046e755c29616ef8830f9e7d9cf5728"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6a02a3c7950cafaadcd46a226ad9e12fc9744652cc69f9e5534f98b47f3bbcf"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10b8dd31e10f32410751b3430996f9807fc4d1587ca69772e2aa940a82ab571a"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edc0202099ea1d82844316604e17d2b175044f9bcb6b398aab781eba957224bd"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b891a2f68e09c5ef989007fac11476ed33c5c9994449a4e2c3386529d703dc8b"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:71ef3b9be10070360f289aea4838c784f8b851be3ba58cf796262b57775c2f14"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:55602981b2dbf8184c098bc10287e8c245e351cd4fdcad050bd7199d5a8bf514"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:46fb9970aa5eeca547d7aa0de5d4b124a288b42eaefac677bde805013c95725c"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:520b7a142d2524f999447b3a0cf95115df81c4f33003c51a6ab637cbda9d0bf4"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-win32.whl", hash = "sha256:8ec8ef42c6cd5856a7613dcd1eaf21e5573b2185263d87d27c8edcae33b62a61"},
- {file = "charset_normalizer-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:baec8148d6b8bd5cee1ae138ba658c71f5b03e0d69d5907703e3e1df96db5e41"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63a6f59e2d01310f754c270e4a257426fe5a591dc487f1983b3bbe793cf6bac6"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d6bfc32a68bc0933819cfdfe45f9abc3cae3877e1d90aac7259d57e6e0f85b1"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f3100d86dcd03c03f7e9c3fdb23d92e32abbca07e7c13ebd7ddfbcb06f5991f"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39b70a6f88eebe239fa775190796d55a33cfb6d36b9ffdd37843f7c4c1b5dc67"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e12f8ee80aa35e746230a2af83e81bd6b52daa92a8afaef4fea4a2ce9b9f4fa"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b6cefa579e1237ce198619b76eaa148b71894fb0d6bcf9024460f9bf30fd228"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:61f1e3fb621f5420523abb71f5771a204b33c21d31e7d9d86881b2cffe92c47c"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4f6e2a839f83a6a76854d12dbebde50e4b1afa63e27761549d006fa53e9aa80e"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:1ec937546cad86d0dce5396748bf392bb7b62a9eeb8c66efac60e947697f0e58"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:82ca51ff0fc5b641a2d4e1cc8c5ff108699b7a56d7f3ad6f6da9dbb6f0145b48"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:633968254f8d421e70f91c6ebe71ed0ab140220469cf87a9857e21c16687c034"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-win32.whl", hash = "sha256:c0c72d34e7de5604df0fde3644cc079feee5e55464967d10b24b1de268deceb9"},
- {file = "charset_normalizer-3.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:63accd11149c0f9a99e3bc095bbdb5a464862d77a7e309ad5938fbc8721235ae"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5a3580a4fdc4ac05f9e53c57f965e3594b2f99796231380adb2baaab96e22761"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2465aa50c9299d615d757c1c888bc6fef384b7c4aec81c05a0172b4400f98557"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb7cd68814308aade9d0c93c5bd2ade9f9441666f8ba5aa9c2d4b389cb5e2a45"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e43805ccafa0a91831f9cd5443aa34528c0c3f2cc48c4cb3d9a7721053874b"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:854cc74367180beb327ab9d00f964f6d91da06450b0855cbbb09187bcdb02de5"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c15070ebf11b8b7fd1bfff7217e9324963c82dbdf6182ff7050519e350e7ad9f"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4c99f98fc3a1835af8179dcc9013f93594d0670e2fa80c83aa36346ee763d2"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fb765362688821404ad6cf86772fc54993ec11577cd5a92ac44b4c2ba52155b"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dced27917823df984fe0c80a5c4ad75cf58df0fbfae890bc08004cd3888922a2"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a66bcdf19c1a523e41b8e9d53d0cedbfbac2e93c649a2e9502cb26c014d0980c"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ecd26be9f112c4f96718290c10f4caea6cc798459a3a76636b817a0ed7874e42"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f70fd716855cd3b855316b226a1ac8bdb3caf4f7ea96edcccc6f484217c9597"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:17a866d61259c7de1bdadef418a37755050ddb4b922df8b356503234fff7932c"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-win32.whl", hash = "sha256:548eefad783ed787b38cb6f9a574bd8664468cc76d1538215d510a3cd41406cb"},
- {file = "charset_normalizer-3.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:45f053a0ece92c734d874861ffe6e3cc92150e32136dd59ab1fb070575189c97"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bc791ec3fd0c4309a753f95bb6c749ef0d8ea3aea91f07ee1cf06b7b02118f2f"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8c61fb505c7dad1d251c284e712d4e0372cef3b067f7ddf82a7fa82e1e9a93"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2c092be3885a1b7899cd85ce24acedc1034199d6fca1483fa2c3a35c86e43041"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2000c54c395d9e5e44c99dc7c20a64dc371f777faf8bae4919ad3e99ce5253e"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cb50a0335382aac15c31b61d8531bc9bb657cfd848b1d7158009472189f3d62"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c30187840d36d0ba2893bc3271a36a517a717f9fd383a98e2697ee890a37c273"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe81b35c33772e56f4b6cf62cf4aedc1762ef7162a31e6ac7fe5e40d0149eb67"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bf89afcbcf4d1bb2652f6580e5e55a840fdf87384f6063c4a4f0c95e378656"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:06cf46bdff72f58645434d467bf5228080801298fbba19fe268a01b4534467f5"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3c66df3f41abee950d6638adc7eac4730a306b022570f71dd0bd6ba53503ab57"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd805513198304026bd379d1d516afbf6c3c13f4382134a2c526b8b854da1c2e"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:9505dc359edb6a330efcd2be825fdb73ee3e628d9010597aa1aee5aa63442e97"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31445f38053476a0c4e6d12b047b08ced81e2c7c712e5a1ad97bc913256f91b2"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-win32.whl", hash = "sha256:bd28b31730f0e982ace8663d108e01199098432a30a4c410d06fe08fdb9e93f4"},
- {file = "charset_normalizer-3.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:555fe186da0068d3354cdf4bbcbc609b0ecae4d04c921cc13e209eece7720727"},
- {file = "charset_normalizer-3.3.1-py3-none-any.whl", hash = "sha256:800561453acdecedaac137bf09cd719c7a440b6800ec182f077bb8e7025fb708"},
+python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"},
+ {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"},
+ {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"},
]
[[package]]
name = "click"
-version = "8.1.7"
+version = "8.1.8"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
- {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
+ {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
]
[package.dependencies]
@@ -311,6 +354,8 @@ version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["main"]
+markers = "(extra == \"pytest-cov\" or platform_system == \"Windows\" or extra == \"pytest-mock\" or extra == \"pytest\" or extra == \"pytest-xdist\") and (sys_platform == \"win32\" or platform_system == \"Windows\") and (python_version <= \"3.11\" or python_version >= \"3.12\")"
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
@@ -322,6 +367,8 @@ version = "15.0.1"
description = "Colored terminal output for Python's logging module"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"},
{file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"},
@@ -335,61 +382,75 @@ cron = ["capturer (>=2.4)"]
[[package]]
name = "coverage"
-version = "6.5.0"
+version = "7.6.10"
description = "Code coverage measurement for Python"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"},
- {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"},
- {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"},
- {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"},
- {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"},
- {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"},
- {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"},
- {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"},
- {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"},
- {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"},
- {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"},
- {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"},
- {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"},
- {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"},
- {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"},
- {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"},
- {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"},
- {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"},
- {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"},
- {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"},
- {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"},
- {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"},
- {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"},
- {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"},
- {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"},
- {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"},
- {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"},
- {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"},
- {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"},
- {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"},
- {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"},
- {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"},
- {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"},
- {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"},
- {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"},
- {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"},
- {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"},
- {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"},
+optional = true
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "(extra == \"coveralls\" or extra == \"pytest-cov\") and (python_version <= \"3.11\" or python_version >= \"3.12\")"
+files = [
+ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"},
+ {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"},
+ {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"},
+ {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"},
+ {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"},
+ {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"},
+ {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"},
+ {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"},
+ {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"},
+ {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"},
+ {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"},
+ {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"},
+ {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"},
+ {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"},
+ {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"},
+ {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"},
+ {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"},
+ {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"},
+ {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"},
+ {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"},
+ {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"},
+ {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"},
+ {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"},
+ {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"},
+ {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"},
+ {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"},
+ {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"},
+ {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"},
+ {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"},
+ {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"},
+ {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"},
+ {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"},
+ {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"},
+ {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"},
+ {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"},
+ {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"},
+ {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"},
+ {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"},
+ {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"},
+ {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"},
+ {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"},
+ {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"},
+ {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"},
+ {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"},
+ {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"},
+ {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"},
+ {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"},
+ {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"},
+ {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"},
+ {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"},
+ {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"},
+ {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"},
+ {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"},
+ {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"},
+ {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"},
+ {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"},
+ {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"},
+ {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"},
+ {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"},
+ {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"},
+ {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"},
+ {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"},
]
[package.dependencies]
@@ -400,57 +461,61 @@ toml = ["tomli"]
[[package]]
name = "coveralls"
-version = "3.3.1"
+version = "4.0.1"
description = "Show coverage stats online via coveralls.io"
-optional = false
-python-versions = ">= 3.5"
+optional = true
+python-versions = "<3.13,>=3.8"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"coveralls\""
files = [
- {file = "coveralls-3.3.1-py2.py3-none-any.whl", hash = "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026"},
- {file = "coveralls-3.3.1.tar.gz", hash = "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea"},
+ {file = "coveralls-4.0.1-py3-none-any.whl", hash = "sha256:7a6b1fa9848332c7b2221afb20f3df90272ac0167060f41b5fe90429b30b1809"},
+ {file = "coveralls-4.0.1.tar.gz", hash = "sha256:7b2a0a2bcef94f295e3cf28dcc55ca40b71c77d1c2446b538e85f0f7bc21aa69"},
]
[package.dependencies]
-coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<7.0"
-docopt = ">=0.6.1"
-requests = ">=1.0.0"
+coverage = {version = ">=5.0,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<8.0", extras = ["toml"]}
+docopt = ">=0.6.1,<0.7.0"
+requests = ">=1.0.0,<3.0.0"
[package.extras]
-yaml = ["PyYAML (>=3.10)"]
+yaml = ["pyyaml (>=3.10,<7.0)"]
[[package]]
name = "cryptography"
-version = "43.0.1"
+version = "43.0.3"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
-files = [
- {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"},
- {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"},
- {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"},
- {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"},
- {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"},
- {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"},
- {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"},
- {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"},
- {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"},
- {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"},
- {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"},
- {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"},
- {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"},
- {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"},
- {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"},
- {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"},
- {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"},
- {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"},
- {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"},
- {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"},
- {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"},
- {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"},
- {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"},
- {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"},
- {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"},
- {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"},
- {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"},
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"},
+ {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"},
+ {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"},
+ {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"},
+ {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"},
+ {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"},
+ {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"},
+ {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"},
+ {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"},
+ {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"},
+ {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"},
+ {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"},
+ {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"},
+ {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"},
+ {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"},
+ {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"},
+ {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"},
+ {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"},
+ {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"},
+ {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"},
+ {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"},
+ {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"},
+ {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"},
+ {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"},
+ {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"},
+ {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"},
+ {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"},
]
[package.dependencies]
@@ -463,37 +528,41 @@ nox = ["nox"]
pep8test = ["check-sdist", "click", "mypy", "ruff"]
sdist = ["build"]
ssh = ["bcrypt (>=3.1.5)"]
-test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
[[package]]
name = "distlib"
-version = "0.3.7"
+version = "0.3.9"
description = "Distribution utilities"
-optional = false
+optional = true
python-versions = "*"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pre-commit\""
files = [
- {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"},
- {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"},
+ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"},
+ {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"},
]
[[package]]
name = "dnspython"
-version = "2.6.1"
+version = "2.7.0"
description = "DNS toolkit"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"},
- {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"},
+ {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"},
+ {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"},
]
[package.extras]
-dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
-dnssec = ["cryptography (>=41)"]
+dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
+dnssec = ["cryptography (>=43)"]
doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
-doq = ["aioquic (>=0.9.25)"]
-idna = ["idna (>=3.6)"]
+doq = ["aioquic (>=1.0.0)"]
+idna = ["idna (>=3.7)"]
trio = ["trio (>=0.23)"]
wmi = ["wmi (>=1.5.1)"]
@@ -501,8 +570,10 @@ wmi = ["wmi (>=1.5.1)"]
name = "docopt"
version = "0.6.2"
description = "Pythonic argument parser, that will make you smile"
-optional = false
+optional = true
python-versions = "*"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"coveralls\""
files = [
{file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"},
]
@@ -513,6 +584,8 @@ version = "2.2.0"
description = "A robust email address syntax and deliverability validation library."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"},
{file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"},
@@ -524,24 +597,28 @@ idna = ">=2.0.0"
[[package]]
name = "et-xmlfile"
-version = "1.1.0"
+version = "2.0.0"
description = "An implementation of lxml.xmlfile for the standard library"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"},
- {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"},
+ {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"},
+ {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"},
]
[[package]]
name = "exceptiongroup"
-version = "1.1.3"
+version = "1.2.2"
description = "Backport of PEP 654 (exception groups)"
-optional = false
+optional = true
python-versions = ">=3.7"
+groups = ["main"]
+markers = "(extra == \"pytest-cov\" or extra == \"pytest-mock\" or extra == \"pytest\" or extra == \"pytest-xdist\") and python_version < \"3.11\""
files = [
- {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"},
- {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"},
+ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
+ {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
]
[package.extras]
@@ -549,13 +626,15 @@ test = ["pytest (>=6)"]
[[package]]
name = "execnet"
-version = "2.0.2"
+version = "2.1.1"
description = "execnet: rapid multi-Python deployment"
-optional = false
-python-versions = ">=3.7"
+optional = true
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pytest-xdist\""
files = [
- {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"},
- {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"},
+ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"},
+ {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"},
]
[package.extras]
@@ -563,38 +642,42 @@ testing = ["hatch", "pre-commit", "pytest", "tox"]
[[package]]
name = "filelock"
-version = "3.12.4"
+version = "3.16.1"
description = "A platform independent file lock."
-optional = false
+optional = true
python-versions = ">=3.8"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pre-commit\""
files = [
- {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"},
- {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"},
+ {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"},
+ {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"},
]
[package.extras]
-docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"]
-typing = ["typing-extensions (>=4.7.1)"]
+docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
+typing = ["typing-extensions (>=4.12.2)"]
[[package]]
name = "flask"
-version = "3.0.0"
+version = "3.1.0"
description = "A simple framework for building complex web applications."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "flask-3.0.0-py3-none-any.whl", hash = "sha256:21128f47e4e3b9d597a3e8521a329bf56909b690fcc3fa3e477725aa81367638"},
- {file = "flask-3.0.0.tar.gz", hash = "sha256:cfadcdb638b609361d29ec22360d6070a77d7463dcb3ab08d2c2f2f168845f58"},
+ {file = "flask-3.1.0-py3-none-any.whl", hash = "sha256:d667207822eb83f1c4b50949b1623c8fc8d51f2341d65f72e1a1815397551136"},
+ {file = "flask-3.1.0.tar.gz", hash = "sha256:5f873c5184c897c8d9d1b05df1e3d01b14910ce69607a117bd3277098a5836ac"},
]
[package.dependencies]
-blinker = ">=1.6.2"
+blinker = ">=1.9"
click = ">=8.1.3"
-importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
-itsdangerous = ">=2.1.2"
+importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""}
+itsdangerous = ">=2.2"
Jinja2 = ">=3.1.2"
-Werkzeug = ">=3.0.0"
+Werkzeug = ">=3.1"
[package.extras]
async = ["asgiref (>=3.2)"]
@@ -606,6 +689,8 @@ version = "1.6.1"
description = "Simple and extensible admin interface framework for Flask"
optional = false
python-versions = ">=3.6"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Flask-Admin-1.6.1.tar.gz", hash = "sha256:24cae2af832b6a611a01d7dc35f42d266c1d6c75a426b869d8cb241b78233369"},
{file = "Flask_Admin-1.6.1-py3-none-any.whl", hash = "sha256:fd8190f1ec3355913a22739c46ed3623f1d82b8112cde324c60a6fc9b21c9406"},
@@ -625,6 +710,8 @@ version = "5.0.0"
description = "A Flask extension adding a decorator for CORS support"
optional = false
python-versions = "*"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Flask_Cors-5.0.0-py2.py3-none-any.whl", hash = "sha256:b9e307d082a9261c100d8fb0ba909eec6a228ed1b60a8315fd85f783d61910bc"},
{file = "flask_cors-5.0.0.tar.gz", hash = "sha256:5aadb4b950c4e93745034594d9f3ea6591f734bb3662e16e255ffbf5e89c88ef"},
@@ -635,13 +722,15 @@ Flask = ">=0.9"
[[package]]
name = "flask-dance"
-version = "7.0.0"
+version = "7.1.0"
description = "Doing the OAuth dance with style using Flask, requests, and oauthlib"
optional = false
python-versions = ">=3.6"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "Flask-Dance-7.0.0.tar.gz", hash = "sha256:a37dec5c3a21f13966178285d5c10691cd72203dcef8a01db802fef6287e716d"},
- {file = "flask_dance-7.0.0-py3-none-any.whl", hash = "sha256:935f5806d587eed94b53cf356ee921466bb32ba83405ed5a4e8ac8ea859620dc"},
+ {file = "flask_dance-7.1.0-py3-none-any.whl", hash = "sha256:81599328a2b3604fd4332b3d41a901cf36980c2067e5e38c44ce3b85c4e1ae9c"},
+ {file = "flask_dance-7.1.0.tar.gz", hash = "sha256:6d0510e284f3d6ff05af918849791b17ef93a008628ec33f3a80578a44b51674"},
]
[package.dependencies]
@@ -653,20 +742,22 @@ urlobject = "*"
Werkzeug = "*"
[package.extras]
-docs = ["Flask-Sphinx-Themes", "betamax", "pytest", "sphinx (>=1.3)", "sphinxcontrib-seqdiag", "sphinxcontrib-spelling", "sqlalchemy (>=1.3.11)"]
+docs = ["Flask-Sphinx-Themes", "betamax", "pillow (<=9.5)", "pytest", "sphinx (>=1.3)", "sphinxcontrib-seqdiag", "sphinxcontrib-spelling", "sqlalchemy (>=1.3.11)"]
signals = ["blinker"]
sqla = ["sqlalchemy (>=1.3.11)"]
test = ["betamax", "coverage", "flask-caching", "flask-login", "flask-sqlalchemy", "freezegun", "oauthlib[signedtoken]", "pytest", "pytest-mock", "responses", "sqlalchemy (>=1.3.11)"]
[[package]]
name = "flask-wtf"
-version = "1.2.1"
+version = "1.2.2"
description = "Form rendering, validation, and CSRF protection for Flask with WTForms."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "flask_wtf-1.2.1-py3-none-any.whl", hash = "sha256:fa6793f2fb7e812e0fe9743b282118e581fb1b6c45d414b8af05e659bd653287"},
- {file = "flask_wtf-1.2.1.tar.gz", hash = "sha256:8bb269eb9bb46b87e7c8233d7e7debdf1f8b74bf90cc1789988c29b37a97b695"},
+ {file = "flask_wtf-1.2.2-py3-none-any.whl", hash = "sha256:e93160c5c5b6b571cf99300b6e01b72f9a101027cab1579901f8b10c5daf0b70"},
+ {file = "flask_wtf-1.2.2.tar.gz", hash = "sha256:79d2ee1e436cf570bccb7d916533fa18757a2f18c290accffab1b9a0b684666b"},
]
[package.dependencies]
@@ -683,6 +774,8 @@ version = "1.0.0"
description = "Python interface to the Illumina Basespace Clarity LIMS (Laboratory Information Management System) server via its REST API."
optional = false
python-versions = "*"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "genologics-1.0.0-py3-none-any.whl", hash = "sha256:3f3557f60978449af27ae18afd877482ad7ea4978a77e4aa233cb2260feb30db"},
{file = "genologics-1.0.0.tar.gz", hash = "sha256:59bd07ea5b6d0fb47b26404ce98632a2c58ba85897522cb4d95fea233cde8122"},
@@ -693,13 +786,15 @@ requests = "*"
[[package]]
name = "google-auth"
-version = "2.23.2"
+version = "2.37.0"
description = "Google Authentication Library"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "google-auth-2.23.2.tar.gz", hash = "sha256:5a9af4be520ba33651471a0264eead312521566f44631cbb621164bc30c8fd40"},
- {file = "google_auth-2.23.2-py2.py3-none-any.whl", hash = "sha256:c2e253347579d483004f17c3bd0bf92e611ef6c7ba24d41c5c59f2e7aeeaf088"},
+ {file = "google_auth-2.37.0-py2.py3-none-any.whl", hash = "sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0"},
+ {file = "google_auth-2.37.0.tar.gz", hash = "sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00"},
]
[package.dependencies]
@@ -709,95 +804,111 @@ rsa = ">=3.1.4,<5"
[package.extras]
aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"]
-enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
+enterprise-cert = ["cryptography", "pyopenssl"]
+pyjwt = ["cryptography (>=38.0.3)", "pyjwt (>=2.0)"]
pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
reauth = ["pyu2f (>=0.1.5)"]
requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
[[package]]
name = "greenlet"
-version = "3.0.0"
+version = "3.1.1"
description = "Lightweight in-process concurrent programming"
optional = false
python-versions = ">=3.7"
-files = [
- {file = "greenlet-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e09dea87cc91aea5500262993cbd484b41edf8af74f976719dd83fe724644cd6"},
- {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47932c434a3c8d3c86d865443fadc1fbf574e9b11d6650b656e602b1797908a"},
- {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdfaeecf8cc705d35d8e6de324bf58427d7eafb55f67050d8f28053a3d57118c"},
- {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a68d670c8f89ff65c82b936275369e532772eebc027c3be68c6b87ad05ca695"},
- {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ad562a104cd41e9d4644f46ea37167b93190c6d5e4048fcc4b80d34ecb278f"},
- {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a807b2a58d5cdebb07050efe3d7deaf915468d112dfcf5e426d0564aa3aa4a"},
- {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1660a15a446206c8545edc292ab5c48b91ff732f91b3d3b30d9a915d5ec4779"},
- {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:813720bd57e193391dfe26f4871186cf460848b83df7e23e6bef698a7624b4c9"},
- {file = "greenlet-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:aa15a2ec737cb609ed48902b45c5e4ff6044feb5dcdfcf6fa8482379190330d7"},
- {file = "greenlet-3.0.0-cp310-universal2-macosx_11_0_x86_64.whl", hash = "sha256:7709fd7bb02b31908dc8fd35bfd0a29fc24681d5cc9ac1d64ad07f8d2b7db62f"},
- {file = "greenlet-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:211ef8d174601b80e01436f4e6905aca341b15a566f35a10dd8d1e93f5dbb3b7"},
- {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6512592cc49b2c6d9b19fbaa0312124cd4c4c8a90d28473f86f92685cc5fef8e"},
- {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:871b0a8835f9e9d461b7fdaa1b57e3492dd45398e87324c047469ce2fc9f516c"},
- {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b505fcfc26f4148551826a96f7317e02c400665fa0883fe505d4fcaab1dabfdd"},
- {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123910c58234a8d40eaab595bc56a5ae49bdd90122dde5bdc012c20595a94c14"},
- {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:96d9ea57292f636ec851a9bb961a5cc0f9976900e16e5d5647f19aa36ba6366b"},
- {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"},
- {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"},
- {file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"},
- {file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"},
- {file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"},
- {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"},
- {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"},
- {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d363666acc21d2c204dd8705c0e0457d7b2ee7a76cb16ffc099d6799744ac99"},
- {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:334ef6ed8337bd0b58bb0ae4f7f2dcc84c9f116e474bb4ec250a8bb9bd797a66"},
- {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6672fdde0fd1a60b44fb1751a7779c6db487e42b0cc65e7caa6aa686874e79fb"},
- {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"},
- {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"},
- {file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"},
- {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"},
- {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"},
- {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"},
- {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e52a712c38e5fb4fd68e00dc3caf00b60cb65634d50e32281a9d6431b33b4af1"},
- {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5539f6da3418c3dc002739cb2bb8d169056aa66e0c83f6bacae0cd3ac26b423"},
- {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:343675e0da2f3c69d3fb1e894ba0a1acf58f481f3b9372ce1eb465ef93cf6fed"},
- {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:abe1ef3d780de56defd0c77c5ba95e152f4e4c4e12d7e11dd8447d338b85a625"},
- {file = "greenlet-3.0.0-cp37-cp37m-win32.whl", hash = "sha256:e693e759e172fa1c2c90d35dea4acbdd1d609b6936115d3739148d5e4cd11947"},
- {file = "greenlet-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bdd696947cd695924aecb3870660b7545a19851f93b9d327ef8236bfc49be705"},
- {file = "greenlet-3.0.0-cp37-universal2-macosx_11_0_x86_64.whl", hash = "sha256:cc3e2679ea13b4de79bdc44b25a0c4fcd5e94e21b8f290791744ac42d34a0353"},
- {file = "greenlet-3.0.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:63acdc34c9cde42a6534518e32ce55c30f932b473c62c235a466469a710bfbf9"},
- {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a1a6244ff96343e9994e37e5b4839f09a0207d35ef6134dce5c20d260d0302c"},
- {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b822fab253ac0f330ee807e7485769e3ac85d5eef827ca224feaaefa462dc0d0"},
- {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8060b32d8586e912a7b7dac2d15b28dbbd63a174ab32f5bc6d107a1c4143f40b"},
- {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:621fcb346141ae08cb95424ebfc5b014361621b8132c48e538e34c3c93ac7365"},
- {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6bb36985f606a7c49916eff74ab99399cdfd09241c375d5a820bb855dfb4af9f"},
- {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10b5582744abd9858947d163843d323d0b67be9432db50f8bf83031032bc218d"},
- {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f351479a6914fd81a55c8e68963609f792d9b067fb8a60a042c585a621e0de4f"},
- {file = "greenlet-3.0.0-cp38-cp38-win32.whl", hash = "sha256:9de687479faec7db5b198cc365bc34addd256b0028956501f4d4d5e9ca2e240a"},
- {file = "greenlet-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:3fd2b18432e7298fcbec3d39e1a0aa91ae9ea1c93356ec089421fabc3651572b"},
- {file = "greenlet-3.0.0-cp38-universal2-macosx_11_0_x86_64.whl", hash = "sha256:3c0d36f5adc6e6100aedbc976d7428a9f7194ea79911aa4bf471f44ee13a9464"},
- {file = "greenlet-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4cd83fb8d8e17633ad534d9ac93719ef8937568d730ef07ac3a98cb520fd93e4"},
- {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a5b2d4cdaf1c71057ff823a19d850ed5c6c2d3686cb71f73ae4d6382aaa7a06"},
- {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e7dcdfad252f2ca83c685b0fa9fba00e4d8f243b73839229d56ee3d9d219314"},
- {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94e4e924d09b5a3e37b853fe5924a95eac058cb6f6fb437ebb588b7eda79870"},
- {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6fb737e46b8bd63156b8f59ba6cdef46fe2b7db0c5804388a2d0519b8ddb99"},
- {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d55db1db455c59b46f794346efce896e754b8942817f46a1bada2d29446e305a"},
- {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:56867a3b3cf26dc8a0beecdb4459c59f4c47cdd5424618c08515f682e1d46692"},
- {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a812224a5fb17a538207e8cf8e86f517df2080c8ee0f8c1ed2bdaccd18f38f4"},
- {file = "greenlet-3.0.0-cp39-cp39-win32.whl", hash = "sha256:0d3f83ffb18dc57243e0151331e3c383b05e5b6c5029ac29f754745c800f8ed9"},
- {file = "greenlet-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:831d6f35037cf18ca5e80a737a27d822d87cd922521d18ed3dbc8a6967be50ce"},
- {file = "greenlet-3.0.0-cp39-universal2-macosx_11_0_x86_64.whl", hash = "sha256:a048293392d4e058298710a54dfaefcefdf49d287cd33fb1f7d63d55426e4355"},
- {file = "greenlet-3.0.0.tar.gz", hash = "sha256:19834e3f91f485442adc1ee440171ec5d9a4840a1f7bd5ed97833544719ce10b"},
+groups = ["main"]
+markers = "(platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") and (python_version <= \"3.11\" or python_version >= \"3.12\")"
+files = [
+ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"},
+ {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"},
+ {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"},
+ {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"},
+ {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"},
+ {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"},
+ {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"},
+ {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"},
+ {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"},
+ {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"},
+ {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"},
+ {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"},
+ {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"},
+ {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"},
+ {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"},
+ {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"},
+ {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"},
+ {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"},
+ {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"},
+ {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"},
+ {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"},
+ {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"},
+ {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"},
+ {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"},
+ {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"},
+ {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"},
+ {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"},
+ {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"},
+ {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"},
+ {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"},
+ {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"},
+ {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"},
+ {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"},
]
[package.extras]
-docs = ["Sphinx"]
+docs = ["Sphinx", "furo"]
test = ["objgraph", "psutil"]
[[package]]
name = "gunicorn"
-version = "22.0.0"
+version = "23.0.0"
description = "WSGI HTTP Server for UNIX"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"},
- {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"},
+ {file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"},
+ {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"},
]
[package.dependencies]
@@ -812,18 +923,21 @@ tornado = ["tornado (>=0.2)"]
[[package]]
name = "housekeeper"
-version = "4.11.3"
+version = "4.13.2"
description = "Housekeeper takes care of files"
optional = false
python-versions = "*"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "housekeeper-4.11.3-py2.py3-none-any.whl", hash = "sha256:35d0b3155ad166edb0c7d384d5ef1246bd660c1637276926addbe9793a01b68f"},
- {file = "housekeeper-4.11.3.tar.gz", hash = "sha256:fd8b112e8be78aaf0a77695e53f4058cb4922c788d4daa20305db26a572d0035"},
+ {file = "housekeeper-4.13.2-py2.py3-none-any.whl", hash = "sha256:c62a470c0ac8eff591619335e151656cc05e32412ae1d222434707409cb25922"},
+ {file = "housekeeper-4.13.2.tar.gz", hash = "sha256:b81477f592f77484b5dfccb4ffdda55a7609bc455b25db5bfc7d3d5cd962a52a"},
]
[package.dependencies]
Click = "*"
coloredlogs = "*"
+cryptography = "*"
marshmallow = "*"
pymysql = "*"
pyyaml = "*"
@@ -836,6 +950,8 @@ version = "10.0"
description = "Human friendly output for text interfaces using Python"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"},
{file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"},
@@ -846,13 +962,15 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve
[[package]]
name = "identify"
-version = "2.5.30"
+version = "2.6.5"
description = "File identification library for Python"
-optional = false
-python-versions = ">=3.8"
+optional = true
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pre-commit\""
files = [
- {file = "identify-2.5.30-py2.py3-none-any.whl", hash = "sha256:afe67f26ae29bab007ec21b03d4114f41316ab9dd15aa8736a167481e108da54"},
- {file = "identify-2.5.30.tar.gz", hash = "sha256:f302a4256a15c849b91cfcdcec052a8ce914634b2f77ae87dad29cd749f2d88d"},
+ {file = "identify-2.6.5-py2.py3-none-any.whl", hash = "sha256:14181a47091eb75b337af4c23078c9d09225cd4c48929f521f3bf16b09d02566"},
+ {file = "identify-2.6.5.tar.gz", hash = "sha256:c10b33f250e5bba374fae86fb57f3adcebf1161bce7cdf92031915fd480c13bc"},
]
[package.extras]
@@ -860,40 +978,53 @@ license = ["ukkonen"]
[[package]]
name = "idna"
-version = "3.7"
+version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.6"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
- {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
+ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
+ {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
]
+[package.extras]
+all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
+
[[package]]
name = "importlib-metadata"
-version = "6.8.0"
+version = "8.5.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version < \"3.10\""
files = [
- {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"},
- {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"},
+ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
+ {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
]
[package.dependencies]
-zipp = ">=0.5"
+zipp = ">=3.20"
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
perf = ["ipython"]
-testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
+test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
+type = ["pytest-mypy"]
[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
-optional = false
+optional = true
python-versions = ">=3.7"
+groups = ["main"]
+markers = "(extra == \"pytest-cov\" or extra == \"pytest-mock\" or extra == \"pytest\" or extra == \"pytest-xdist\") and (python_version <= \"3.11\" or python_version >= \"3.12\")"
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
@@ -901,24 +1032,28 @@ files = [
[[package]]
name = "itsdangerous"
-version = "2.1.2"
+version = "2.2.0"
description = "Safely pass data to untrusted environments and back."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
- {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"},
+ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"},
+ {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"},
]
[[package]]
name = "jinja2"
-version = "3.1.4"
+version = "3.1.5"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
- {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
+ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"},
+ {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"},
]
[package.dependencies]
@@ -929,120 +1064,171 @@ i18n = ["Babel (>=2.7)"]
[[package]]
name = "lxml"
-version = "4.9.3"
+version = "5.3.0"
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
-files = [
- {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"},
- {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"},
- {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"},
- {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"},
- {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"},
- {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"},
- {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"},
- {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"},
- {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"},
- {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"},
- {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"},
- {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"},
- {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"},
- {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"},
- {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"},
- {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"},
- {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"},
- {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"},
- {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"},
- {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"},
- {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"},
- {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"},
- {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"},
- {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"},
- {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"},
- {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"},
- {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"},
- {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"},
- {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"},
- {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"},
- {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"},
- {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"},
- {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"},
- {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"},
- {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"},
- {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"},
- {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"},
- {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"},
- {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"},
- {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"},
- {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"},
- {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"},
- {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"},
- {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"},
- {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"},
- {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"},
- {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"},
- {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"},
- {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"},
- {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"},
- {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"},
- {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"},
- {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"},
- {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"},
- {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"},
- {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"},
- {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"},
- {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"},
- {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"},
- {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"},
- {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"},
- {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"},
- {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"},
- {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"},
- {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"},
- {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"},
- {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"},
- {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"},
- {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"},
- {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"},
- {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"},
- {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"},
- {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"},
- {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"},
- {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"},
- {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"},
- {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"},
- {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"},
- {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"},
- {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"},
- {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"},
- {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"},
- {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"},
- {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"},
- {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"},
- {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"},
- {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"},
- {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"},
- {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"},
- {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"},
- {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"},
- {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"},
+python-versions = ">=3.6"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"},
+ {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"},
+ {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"},
+ {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"},
+ {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"},
+ {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"},
+ {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"},
+ {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"},
+ {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"},
+ {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"},
+ {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"},
+ {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"},
+ {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"},
+ {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"},
+ {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"},
+ {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"},
+ {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"},
+ {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"},
+ {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"},
+ {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"},
+ {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"},
+ {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"},
+ {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"},
+ {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"},
+ {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"},
+ {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"},
+ {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"},
+ {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"},
+ {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"},
+ {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"},
+ {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"},
+ {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"},
+ {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"},
+ {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"},
+ {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"},
+ {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"},
+ {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"},
+ {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"},
+ {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"},
+ {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"},
+ {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"},
+ {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"},
+ {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"},
+ {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"},
+ {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"},
+ {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"},
+ {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"},
+ {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"},
+ {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"},
+ {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"},
+ {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"},
+ {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"},
+ {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"},
+ {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"},
+ {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"},
+ {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"},
+ {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"},
+ {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"},
+ {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"},
+ {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"},
+ {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"},
+ {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"},
+ {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"},
+ {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"},
+ {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"},
+ {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"},
+ {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"},
+ {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"},
+ {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"},
]
[package.extras]
cssselect = ["cssselect (>=0.7)"]
+html-clean = ["lxml-html-clean"]
html5 = ["html5lib"]
htmlsoup = ["BeautifulSoup4"]
-source = ["Cython (>=0.29.35)"]
+source = ["Cython (>=3.0.11)"]
[[package]]
name = "mako"
-version = "1.2.4"
+version = "1.3.8"
description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"},
- {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"},
+ {file = "Mako-1.3.8-py3-none-any.whl", hash = "sha256:42f48953c7eb91332040ff567eb7eea69b22e7a4affbc5ba8e845e8f730f6627"},
+ {file = "mako-1.3.8.tar.gz", hash = "sha256:577b97e414580d3e088d47c2dbbe9594aa7a5146ed2875d4dfa9075af2dd3cc8"},
]
[package.dependencies]
@@ -1059,6 +1245,8 @@ version = "3.0.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
{file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
@@ -1079,92 +1267,96 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
[[package]]
name = "markupsafe"
-version = "2.1.3"
+version = "3.0.2"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
-python-versions = ">=3.7"
-files = [
- {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"},
- {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"},
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"},
+ {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"},
]

[[package]]
name = "marshmallow"
-version = "3.20.1"
+version = "3.25.1"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"},
- {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"},
+ {file = "marshmallow-3.25.1-py3-none-any.whl", hash = "sha256:ec5d00d873ce473b7f2ffcb7104286a376c354cab0c2fa12f5573dab03e87210"},
+ {file = "marshmallow-3.25.1.tar.gz", hash = "sha256:f4debda3bb11153d81ac34b0d582bf23053055ee11e791b54b4b35493468040a"},
]

[package.dependencies]
packaging = ">=17.0"

[package.extras]
-dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"]
-docs = ["alabaster (==0.7.13)", "autodocsumm (==0.2.11)", "sphinx (==7.0.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
-lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)"]
-tests = ["pytest", "pytz", "simplejson"]
+dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
+docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"]
+tests = ["pytest", "simplejson"]

[[package]]
name = "mdurl"
@@ -1172,6 +1364,8 @@ version = "0.1.2"
description = "Markdown URL utilities"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
@@ -1181,8 +1375,10 @@ files = [
name = "mock"
version = "5.1.0"
description = "Rolling backport of unittest.mock for all Pythons"
-optional = false
+optional = true
python-versions = ">=3.6"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mock\""
files = [
{file = "mock-5.1.0-py3-none-any.whl", hash = "sha256:18c694e5ae8a208cdb3d2c20a993ca1a7b0efa258c247a1e565150f477f83744"},
{file = "mock-5.1.0.tar.gz", hash = "sha256:5e96aad5ccda4718e0a229ed94b2024df75cc2d55575ba5762d31f5767b8767d"},
@@ -1195,122 +1391,212 @@ test = ["pytest", "pytest-cov"]

[[package]]
name = "msgpack"
-version = "1.0.7"
+version = "1.1.0"
description = "MessagePack serializer"
optional = false
python-versions = ">=3.8"
-files = [
- {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862"},
- {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329"},
- {file = "msgpack-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b"},
- {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6"},
- {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee"},
- {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d"},
- {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d"},
- {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1"},
- {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681"},
- {file = "msgpack-1.0.7-cp310-cp310-win32.whl", hash = "sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9"},
- {file = "msgpack-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415"},
- {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84"},
- {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93"},
- {file = "msgpack-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8"},
- {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46"},
- {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b"},
- {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e"},
- {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002"},
- {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c"},
- {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e"},
- {file = "msgpack-1.0.7-cp311-cp311-win32.whl", hash = "sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1"},
- {file = "msgpack-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82"},
- {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b"},
- {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4"},
- {file = "msgpack-1.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee"},
- {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5"},
- {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672"},
- {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075"},
- {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba"},
- {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c"},
- {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5"},
- {file = "msgpack-1.0.7-cp312-cp312-win32.whl", hash = "sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9"},
- {file = "msgpack-1.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf"},
- {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95"},
- {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0"},
- {file = "msgpack-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7"},
- {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d"},
- {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524"},
- {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc"},
- {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc"},
- {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf"},
- {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c"},
- {file = "msgpack-1.0.7-cp38-cp38-win32.whl", hash = "sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2"},
- {file = "msgpack-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c"},
- {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f"},
- {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81"},
- {file = "msgpack-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc"},
- {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d"},
- {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7"},
- {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61"},
- {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819"},
- {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd"},
- {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f"},
- {file = "msgpack-1.0.7-cp39-cp39-win32.whl", hash = "sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad"},
- {file = "msgpack-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3"},
- {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"},
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"},
+ {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"},
+ {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"},
+ {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"},
+ {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"},
+ {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"},
+ {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"},
+ {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"},
+ {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"},
+ {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"},
+ {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"},
+ {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"},
+ {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"},
+ {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"},
+ {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"},
+ {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"},
+ {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"},
+ {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"},
+ {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"},
+ {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"},
+ {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"},
+ {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"},
+ {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"},
+ {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"},
+ {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"},
+ {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"},
+ {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"},
+ {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"},
+ {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"},
+ {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"},
+ {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"},
+ {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"},
+ {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"},
+ {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"},
+ {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"},
+ {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"},
+ {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"},
+ {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"},
+ {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"},
+ {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"},
+ {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"},
+ {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"},
+ {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"},
+ {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"},
+ {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"},
+ {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"},
+ {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"},
+ {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"},
+ {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"},
+ {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"},
+ {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"},
+ {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"},
+ {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"},
+ {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"},
+ {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"},
+ {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"},
+ {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"},
+ {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"},
+ {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"},
+ {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"},
+ {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"},
+ {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"},
+ {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"},
+ {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"},
]

[[package]]
name = "nodeenv"
-version = "1.8.0"
+version = "1.9.1"
description = "Node.js virtual environment builder"
-optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
+optional = true
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pre-commit\""
files = [
- {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
- {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
+ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
+ {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
]

-[package.dependencies]
-setuptools = "*"
+[[package]]
+name = "numpy"
+version = "2.0.2"
+description = "Fundamental package for array computing in Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version < \"3.11\""
+files = [
+ {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"},
+ {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"},
+ {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"},
+ {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"},
+ {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"},
+ {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"},
+ {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"},
+ {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"},
+ {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"},
+ {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"},
+ {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"},
+ {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"},
+ {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"},
+ {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"},
+ {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"},
+ {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"},
+ {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"},
+ {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"},
+ {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"},
+ {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"},
+ {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"},
+ {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"},
+ {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"},
+ {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"},
+ {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"},
+ {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"},
+ {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"},
+ {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"},
+ {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"},
+ {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"},
+ {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"},
+ {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"},
+ {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"},
+ {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"},
+ {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"},
+ {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"},
+ {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"},
+ {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"},
+ {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"},
+ {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"},
+ {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"},
+ {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"},
+ {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"},
+ {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"},
+ {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"},
+]

[[package]]
name = "numpy"
-version = "1.26.1"
+version = "2.2.1"
description = "Fundamental package for array computing in Python"
optional = false
-python-versions = "<3.13,>=3.9"
-files = [
- {file = "numpy-1.26.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82e871307a6331b5f09efda3c22e03c095d957f04bf6bc1804f30048d0e5e7af"},
- {file = "numpy-1.26.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdd9ec98f0063d93baeb01aad472a1a0840dee302842a2746a7a8e92968f9575"},
- {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d78f269e0c4fd365fc2992c00353e4530d274ba68f15e968d8bc3c69ce5f5244"},
- {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ab9163ca8aeb7fd32fe93866490654d2f7dda4e61bc6297bf72ce07fdc02f67"},
- {file = "numpy-1.26.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:78ca54b2f9daffa5f323f34cdf21e1d9779a54073f0018a3094ab907938331a2"},
- {file = "numpy-1.26.1-cp310-cp310-win32.whl", hash = "sha256:d1cfc92db6af1fd37a7bb58e55c8383b4aa1ba23d012bdbba26b4bcca45ac297"},
- {file = "numpy-1.26.1-cp310-cp310-win_amd64.whl", hash = "sha256:d2984cb6caaf05294b8466966627e80bf6c7afd273279077679cb010acb0e5ab"},
- {file = "numpy-1.26.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd7837b2b734ca72959a1caf3309457a318c934abef7a43a14bb984e574bbb9a"},
- {file = "numpy-1.26.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c59c046c31a43310ad0199d6299e59f57a289e22f0f36951ced1c9eac3665b9"},
- {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d58e8c51a7cf43090d124d5073bc29ab2755822181fcad978b12e144e5e5a4b3"},
- {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6081aed64714a18c72b168a9276095ef9155dd7888b9e74b5987808f0dd0a974"},
- {file = "numpy-1.26.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:97e5d6a9f0702c2863aaabf19f0d1b6c2628fbe476438ce0b5ce06e83085064c"},
- {file = "numpy-1.26.1-cp311-cp311-win32.whl", hash = "sha256:b9d45d1dbb9de84894cc50efece5b09939752a2d75aab3a8b0cef6f3a35ecd6b"},
- {file = "numpy-1.26.1-cp311-cp311-win_amd64.whl", hash = "sha256:3649d566e2fc067597125428db15d60eb42a4e0897fc48d28cb75dc2e0454e53"},
- {file = "numpy-1.26.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d1bd82d539607951cac963388534da3b7ea0e18b149a53cf883d8f699178c0f"},
- {file = "numpy-1.26.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:afd5ced4e5a96dac6725daeb5242a35494243f2239244fad10a90ce58b071d24"},
- {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03fb25610ef560a6201ff06df4f8105292ba56e7cdd196ea350d123fc32e24e"},
- {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcfaf015b79d1f9f9c9fd0731a907407dc3e45769262d657d754c3a028586124"},
- {file = "numpy-1.26.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e509cbc488c735b43b5ffea175235cec24bbc57b227ef1acc691725beb230d1c"},
- {file = "numpy-1.26.1-cp312-cp312-win32.whl", hash = "sha256:af22f3d8e228d84d1c0c44c1fbdeb80f97a15a0abe4f080960393a00db733b66"},
- {file = "numpy-1.26.1-cp312-cp312-win_amd64.whl", hash = "sha256:9f42284ebf91bdf32fafac29d29d4c07e5e9d1af862ea73686581773ef9e73a7"},
- {file = "numpy-1.26.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb894accfd16b867d8643fc2ba6c8617c78ba2828051e9a69511644ce86ce83e"},
- {file = "numpy-1.26.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e44ccb93f30c75dfc0c3aa3ce38f33486a75ec9abadabd4e59f114994a9c4617"},
- {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9696aa2e35cc41e398a6d42d147cf326f8f9d81befcb399bc1ed7ffea339b64e"},
- {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5b411040beead47a228bde3b2241100454a6abde9df139ed087bd73fc0a4908"},
- {file = "numpy-1.26.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1e11668d6f756ca5ef534b5be8653d16c5352cbb210a5c2a79ff288e937010d5"},
- {file = "numpy-1.26.1-cp39-cp39-win32.whl", hash = "sha256:d1d2c6b7dd618c41e202c59c1413ef9b2c8e8a15f5039e344af64195459e3104"},
- {file = "numpy-1.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:59227c981d43425ca5e5c01094d59eb14e8772ce6975d4b2fc1e106a833d5ae2"},
- {file = "numpy-1.26.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:06934e1a22c54636a059215d6da99e23286424f316fddd979f5071093b648668"},
- {file = "numpy-1.26.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76ff661a867d9272cd2a99eed002470f46dbe0943a5ffd140f49be84f68ffc42"},
- {file = "numpy-1.26.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6965888d65d2848e8768824ca8288db0a81263c1efccec881cb35a0d805fcd2f"},
- {file = "numpy-1.26.1.tar.gz", hash = "sha256:c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe"},
+python-versions = ">=3.10"
+groups = ["main"]
+markers = "python_version == \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "numpy-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5edb4e4caf751c1518e6a26a83501fda79bff41cc59dac48d70e6d65d4ec4440"},
+ {file = "numpy-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa3017c40d513ccac9621a2364f939d39e550c542eb2a894b4c8da92b38896ab"},
+ {file = "numpy-2.2.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:61048b4a49b1c93fe13426e04e04fdf5a03f456616f6e98c7576144677598675"},
+ {file = "numpy-2.2.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7671dc19c7019103ca44e8d94917eba8534c76133523ca8406822efdd19c9308"},
+ {file = "numpy-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4250888bcb96617e00bfa28ac24850a83c9f3a16db471eca2ee1f1714df0f957"},
+ {file = "numpy-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7746f235c47abc72b102d3bce9977714c2444bdfaea7888d241b4c4bb6a78bf"},
+ {file = "numpy-2.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:059e6a747ae84fce488c3ee397cee7e5f905fd1bda5fb18c66bc41807ff119b2"},
+ {file = "numpy-2.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f62aa6ee4eb43b024b0e5a01cf65a0bb078ef8c395e8713c6e8a12a697144528"},
+ {file = "numpy-2.2.1-cp310-cp310-win32.whl", hash = "sha256:48fd472630715e1c1c89bf1feab55c29098cb403cc184b4859f9c86d4fcb6a95"},
+ {file = "numpy-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:b541032178a718c165a49638d28272b771053f628382d5e9d1c93df23ff58dbf"},
+ {file = "numpy-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40f9e544c1c56ba8f1cf7686a8c9b5bb249e665d40d626a23899ba6d5d9e1484"},
+ {file = "numpy-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9b57eaa3b0cd8db52049ed0330747b0364e899e8a606a624813452b8203d5f7"},
+ {file = "numpy-2.2.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bc8a37ad5b22c08e2dbd27df2b3ef7e5c0864235805b1e718a235bcb200cf1cb"},
+ {file = "numpy-2.2.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9036d6365d13b6cbe8f27a0eaf73ddcc070cae584e5ff94bb45e3e9d729feab5"},
+ {file = "numpy-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51faf345324db860b515d3f364eaa93d0e0551a88d6218a7d61286554d190d73"},
+ {file = "numpy-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38efc1e56b73cc9b182fe55e56e63b044dd26a72128fd2fbd502f75555d92591"},
+ {file = "numpy-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:31b89fa67a8042e96715c68e071a1200c4e172f93b0fbe01a14c0ff3ff820fc8"},
+ {file = "numpy-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4c86e2a209199ead7ee0af65e1d9992d1dce7e1f63c4b9a616500f93820658d0"},
+ {file = "numpy-2.2.1-cp311-cp311-win32.whl", hash = "sha256:b34d87e8a3090ea626003f87f9392b3929a7bbf4104a05b6667348b6bd4bf1cd"},
+ {file = "numpy-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:360137f8fb1b753c5cde3ac388597ad680eccbbbb3865ab65efea062c4a1fd16"},
+ {file = "numpy-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:694f9e921a0c8f252980e85bce61ebbd07ed2b7d4fa72d0e4246f2f8aa6642ab"},
+ {file = "numpy-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3683a8d166f2692664262fd4900f207791d005fb088d7fdb973cc8d663626faa"},
+ {file = "numpy-2.2.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:780077d95eafc2ccc3ced969db22377b3864e5b9a0ea5eb347cc93b3ea900315"},
+ {file = "numpy-2.2.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:55ba24ebe208344aa7a00e4482f65742969a039c2acfcb910bc6fcd776eb4355"},
+ {file = "numpy-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b1d07b53b78bf84a96898c1bc139ad7f10fda7423f5fd158fd0f47ec5e01ac7"},
+ {file = "numpy-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5062dc1a4e32a10dc2b8b13cedd58988261416e811c1dc4dbdea4f57eea61b0d"},
+ {file = "numpy-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fce4f615f8ca31b2e61aa0eb5865a21e14f5629515c9151850aa936c02a1ee51"},
+ {file = "numpy-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:67d4cda6fa6ffa073b08c8372aa5fa767ceb10c9a0587c707505a6d426f4e046"},
+ {file = "numpy-2.2.1-cp312-cp312-win32.whl", hash = "sha256:32cb94448be47c500d2c7a95f93e2f21a01f1fd05dd2beea1ccd049bb6001cd2"},
+ {file = "numpy-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:ba5511d8f31c033a5fcbda22dd5c813630af98c70b2661f2d2c654ae3cdfcfc8"},
+ {file = "numpy-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f1d09e520217618e76396377c81fba6f290d5f926f50c35f3a5f72b01a0da780"},
+ {file = "numpy-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3ecc47cd7f6ea0336042be87d9e7da378e5c7e9b3c8ad0f7c966f714fc10d821"},
+ {file = "numpy-2.2.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f419290bc8968a46c4933158c91a0012b7a99bb2e465d5ef5293879742f8797e"},
+ {file = "numpy-2.2.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5b6c390bfaef8c45a260554888966618328d30e72173697e5cabe6b285fb2348"},
+ {file = "numpy-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:526fc406ab991a340744aad7e25251dd47a6720a685fa3331e5c59fef5282a59"},
+ {file = "numpy-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f74e6fdeb9a265624ec3a3918430205dff1df7e95a230779746a6af78bc615af"},
+ {file = "numpy-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:53c09385ff0b72ba79d8715683c1168c12e0b6e84fb0372e97553d1ea91efe51"},
+ {file = "numpy-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3eac17d9ec51be534685ba877b6ab5edc3ab7ec95c8f163e5d7b39859524716"},
+ {file = "numpy-2.2.1-cp313-cp313-win32.whl", hash = "sha256:9ad014faa93dbb52c80d8f4d3dcf855865c876c9660cb9bd7553843dd03a4b1e"},
+ {file = "numpy-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:164a829b6aacf79ca47ba4814b130c4020b202522a93d7bff2202bfb33b61c60"},
+ {file = "numpy-2.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4dfda918a13cc4f81e9118dea249e192ab167a0bb1966272d5503e39234d694e"},
+ {file = "numpy-2.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:733585f9f4b62e9b3528dd1070ec4f52b8acf64215b60a845fa13ebd73cd0712"},
+ {file = "numpy-2.2.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:89b16a18e7bba224ce5114db863e7029803c179979e1af6ad6a6b11f70545008"},
+ {file = "numpy-2.2.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:676f4eebf6b2d430300f1f4f4c2461685f8269f94c89698d832cdf9277f30b84"},
+ {file = "numpy-2.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f5cdf9f493b35f7e41e8368e7d7b4bbafaf9660cba53fb21d2cd174ec09631"},
+ {file = "numpy-2.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1ad395cf254c4fbb5b2132fee391f361a6e8c1adbd28f2cd8e79308a615fe9d"},
+ {file = "numpy-2.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:08ef779aed40dbc52729d6ffe7dd51df85796a702afbf68a4f4e41fafdc8bda5"},
+ {file = "numpy-2.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:26c9c4382b19fcfbbed3238a14abf7ff223890ea1936b8890f058e7ba35e8d71"},
+ {file = "numpy-2.2.1-cp313-cp313t-win32.whl", hash = "sha256:93cf4e045bae74c90ca833cba583c14b62cb4ba2cba0abd2b141ab52548247e2"},
+ {file = "numpy-2.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:bff7d8ec20f5f42607599f9994770fa65d76edca264a87b5e4ea5629bce12268"},
+ {file = "numpy-2.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7ba9cc93a91d86365a5d270dee221fdc04fb68d7478e6bf6af650de78a8339e3"},
+ {file = "numpy-2.2.1-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:3d03883435a19794e41f147612a77a8f56d4e52822337844fff3d4040a142964"},
+ {file = "numpy-2.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4511d9e6071452b944207c8ce46ad2f897307910b402ea5fa975da32e0102800"},
+ {file = "numpy-2.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5c5cc0cbabe9452038ed984d05ac87910f89370b9242371bd9079cb4af61811e"},
+ {file = "numpy-2.2.1.tar.gz", hash = "sha256:45681fd7128c8ad1c379f0ca0776a8b0c6583d2f69889ddac01559dfe4390918"},
]

[[package]]
@@ -1319,6 +1605,8 @@ version = "3.2.2"
description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
optional = false
python-versions = ">=3.6"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
{file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
@@ -1331,13 +1619,15 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]

[[package]]
name = "openpyxl"
-version = "3.0.10"
+version = "3.1.5"
description = "A Python library to read/write Excel 2010 xlsx/xlsm files"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "openpyxl-3.0.10-py2.py3-none-any.whl", hash = "sha256:0ab6d25d01799f97a9464630abacbb34aafecdcaa0ef3cba6d6b3499867d0355"},
- {file = "openpyxl-3.0.10.tar.gz", hash = "sha256:e47805627aebcf860edb4edf7987b1309c1b3632f3750538ed962bbcc3bd7449"},
+ {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"},
+ {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"},
]

[package.dependencies]
@@ -1345,47 +1635,68 @@ et-xmlfile = "*"

[[package]]
name = "packaging"
-version = "23.2"
+version = "24.2"
description = "Core utilities for Python packages"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
- {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
+ {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
]

[[package]]
name = "pandas"
-version = "2.1.1"
+version = "2.2.3"
description = "Powerful data structures for data analysis, time series, and statistics"
optional = false
python-versions = ">=3.9"
-files = [
- {file = "pandas-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58d997dbee0d4b64f3cb881a24f918b5f25dd64ddf31f467bb9b67ae4c63a1e4"},
- {file = "pandas-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02304e11582c5d090e5a52aec726f31fe3f42895d6bfc1f28738f9b64b6f0614"},
- {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffa8f0966de2c22de408d0e322db2faed6f6e74265aa0856f3824813cf124363"},
- {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1f84c144dee086fe4f04a472b5cd51e680f061adf75c1ae4fc3a9275560f8f4"},
- {file = "pandas-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ce97667d06d69396d72be074f0556698c7f662029322027c226fd7a26965cb"},
- {file = "pandas-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:4c3f32fd7c4dccd035f71734df39231ac1a6ff95e8bdab8d891167197b7018d2"},
- {file = "pandas-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e2959720b70e106bb1d8b6eadd8ecd7c8e99ccdbe03ee03260877184bb2877d"},
- {file = "pandas-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25e8474a8eb258e391e30c288eecec565bfed3e026f312b0cbd709a63906b6f8"},
- {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8bd1685556f3374520466998929bade3076aeae77c3e67ada5ed2b90b4de7f0"},
- {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc3657869c7902810f32bd072f0740487f9e030c1a3ab03e0af093db35a9d14e"},
- {file = "pandas-2.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:05674536bd477af36aa2effd4ec8f71b92234ce0cc174de34fd21e2ee99adbc2"},
- {file = "pandas-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:b407381258a667df49d58a1b637be33e514b07f9285feb27769cedb3ab3d0b3a"},
- {file = "pandas-2.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c747793c4e9dcece7bb20156179529898abf505fe32cb40c4052107a3c620b49"},
- {file = "pandas-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3bcad1e6fb34b727b016775bea407311f7721db87e5b409e6542f4546a4951ea"},
- {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5ec7740f9ccb90aec64edd71434711f58ee0ea7f5ed4ac48be11cfa9abf7317"},
- {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29deb61de5a8a93bdd033df328441a79fcf8dd3c12d5ed0b41a395eef9cd76f0"},
- {file = "pandas-2.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f99bebf19b7e03cf80a4e770a3e65eee9dd4e2679039f542d7c1ace7b7b1daa"},
- {file = "pandas-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:84e7e910096416adec68075dc87b986ff202920fb8704e6d9c8c9897fe7332d6"},
- {file = "pandas-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366da7b0e540d1b908886d4feb3d951f2f1e572e655c1160f5fde28ad4abb750"},
- {file = "pandas-2.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e50e72b667415a816ac27dfcfe686dc5a0b02202e06196b943d54c4f9c7693e"},
- {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1ab6a25da197f03ebe6d8fa17273126120874386b4ac11c1d687df288542dd"},
- {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0dbfea0dd3901ad4ce2306575c54348d98499c95be01b8d885a2737fe4d7a98"},
- {file = "pandas-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0489b0e6aa3d907e909aef92975edae89b1ee1654db5eafb9be633b0124abe97"},
- {file = "pandas-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:4cdb0fab0400c2cb46dafcf1a0fe084c8bb2480a1fa8d81e19d15e12e6d4ded2"},
- {file = "pandas-2.1.1.tar.gz", hash = "sha256:fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b"},
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"},
+ {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"},
+ {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"},
+ {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"},
+ {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"},
+ {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"},
+ {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"},
+ {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"},
+ {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"},
+ {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"},
+ {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"},
+ {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"},
+ {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"},
+ {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"},
+ {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"},
+ {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"},
+ {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"},
+ {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"},
+ {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"},
+ {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"},
+ {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"},
+ {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"},
+ {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"},
+ {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"},
+ {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"},
+ {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"},
+ {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"},
+ {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"},
+ {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"},
+ {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"},
+ {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"},
+ {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"},
+ {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"},
+ {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"},
+ {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"},
+ {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"},
+ {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"},
+ {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"},
+ {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"},
+ {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"},
+ {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"},
+ {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"},
]
[package.dependencies]
@@ -1396,41 +1707,44 @@ numpy = [
]
python-dateutil = ">=2.8.2"
pytz = ">=2020.1"
-tzdata = ">=2022.1"
+tzdata = ">=2022.7"
[package.extras]
-all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"]
-aws = ["s3fs (>=2022.05.0)"]
-clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"]
-compression = ["zstandard (>=0.17.0)"]
-computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"]
+all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"]
+aws = ["s3fs (>=2022.11.0)"]
+clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"]
+compression = ["zstandard (>=0.19.0)"]
+computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"]
consortium-standard = ["dataframe-api-compat (>=0.1.7)"]
-excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"]
-feather = ["pyarrow (>=7.0.0)"]
-fss = ["fsspec (>=2022.05.0)"]
-gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"]
-hdf5 = ["tables (>=3.7.0)"]
-html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"]
-mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"]
-output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"]
-parquet = ["pyarrow (>=7.0.0)"]
-performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"]
-plot = ["matplotlib (>=3.6.1)"]
-postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"]
-spss = ["pyreadstat (>=1.1.5)"]
-sql-other = ["SQLAlchemy (>=1.4.36)"]
-test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"]
-xml = ["lxml (>=4.8.0)"]
+excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"]
+feather = ["pyarrow (>=10.0.1)"]
+fss = ["fsspec (>=2022.11.0)"]
+gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"]
+hdf5 = ["tables (>=3.8.0)"]
+html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"]
+mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"]
+output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"]
+parquet = ["pyarrow (>=10.0.1)"]
+performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"]
+plot = ["matplotlib (>=3.6.3)"]
+postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"]
+pyarrow = ["pyarrow (>=10.0.1)"]
+spss = ["pyreadstat (>=1.2.0)"]
+sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"]
+test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
+xml = ["lxml (>=4.9.2)"]
[[package]]
name = "paramiko"
-version = "3.4.0"
+version = "3.5.0"
description = "SSH2 protocol library"
optional = false
python-versions = ">=3.6"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"},
- {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"},
+ {file = "paramiko-3.5.0-py3-none-any.whl", hash = "sha256:1fedf06b085359051cd7d0d270cebe19e755a8a921cc2ddbfa647fb0cd7d68f9"},
+ {file = "paramiko-3.5.0.tar.gz", hash = "sha256:ad11e540da4f55cedda52931f1a3f812a8238a7af7f62a60de538cd80bb28124"},
]
[package.dependencies]
@@ -1449,34 +1763,41 @@ version = "2.6"
description = "Generate human-readable, random object names"
optional = false
python-versions = "*"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "petname-2.6.tar.gz", hash = "sha256:981c31ef772356a373640d1bb7c67c102e0159eda14578c67a1c99d5b34c9e4c"},
]
[[package]]
name = "platformdirs"
-version = "3.11.0"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-optional = false
-python-versions = ">=3.7"
+version = "4.3.6"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
+optional = true
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pre-commit\""
files = [
- {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"},
- {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"},
+ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
+ {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
]
[package.extras]
-docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
+docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
+type = ["mypy (>=1.11.2)"]
[[package]]
name = "pluggy"
-version = "1.3.0"
+version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
-optional = false
+optional = true
python-versions = ">=3.8"
+groups = ["main"]
+markers = "(extra == \"pytest-cov\" or extra == \"pytest-mock\" or extra == \"pytest\" or extra == \"pytest-xdist\") and (python_version <= \"3.11\" or python_version >= \"3.12\")"
files = [
- {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
- {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
+ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
+ {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
]
[package.extras]
@@ -1485,13 +1806,15 @@ testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pre-commit"
-version = "3.4.0"
+version = "4.0.1"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
-optional = false
-python-versions = ">=3.8"
+optional = true
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pre-commit\""
files = [
- {file = "pre_commit-3.4.0-py2.py3-none-any.whl", hash = "sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945"},
- {file = "pre_commit-3.4.0.tar.gz", hash = "sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522"},
+ {file = "pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878"},
+ {file = "pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2"},
]
[package.dependencies]
@@ -1503,64 +1826,76 @@ virtualenv = ">=20.10.0"
[[package]]
name = "psutil"
-version = "5.9.5"
+version = "6.1.1"
description = "Cross-platform lib for process and system monitoring in Python."
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
- {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"},
- {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"},
- {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"},
- {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"},
- {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"},
- {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"},
- {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"},
- {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"},
- {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"},
- {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"},
- {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"},
- {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"},
- {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"},
- {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"},
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"},
+ {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"},
+ {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8df0178ba8a9e5bc84fed9cfa61d54601b371fbec5c8eebad27575f1e105c0d4"},
+ {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:1924e659d6c19c647e763e78670a05dbb7feaf44a0e9c94bf9e14dfc6ba50468"},
+ {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:018aeae2af92d943fdf1da6b58665124897cfc94faa2ca92098838f83e1b1bca"},
+ {file = "psutil-6.1.1-cp27-none-win32.whl", hash = "sha256:6d4281f5bbca041e2292be3380ec56a9413b790579b8e593b1784499d0005dac"},
+ {file = "psutil-6.1.1-cp27-none-win_amd64.whl", hash = "sha256:c777eb75bb33c47377c9af68f30e9f11bc78e0f07fbf907be4a5d70b2fe5f030"},
+ {file = "psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8"},
+ {file = "psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377"},
+ {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003"},
+ {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160"},
+ {file = "psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3"},
+ {file = "psutil-6.1.1-cp36-cp36m-win32.whl", hash = "sha256:384636b1a64b47814437d1173be1427a7c83681b17a450bfc309a1953e329603"},
+ {file = "psutil-6.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8be07491f6ebe1a693f17d4f11e69d0dc1811fa082736500f649f79df7735303"},
+ {file = "psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53"},
+ {file = "psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649"},
+ {file = "psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5"},
]
[package.extras]
-test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"]
+test = ["pytest", "pytest-xdist", "setuptools"]
[[package]]
name = "pyasn1"
-version = "0.5.0"
+version = "0.6.1"
description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"},
- {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"},
+ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"},
+ {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"},
]
[[package]]
name = "pyasn1-modules"
-version = "0.3.0"
+version = "0.4.1"
description = "A collection of ASN.1-based protocols modules"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"},
- {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"},
+ {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"},
+ {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"},
]
[package.dependencies]
-pyasn1 = ">=0.4.6,<0.6.0"
+pyasn1 = ">=0.4.6,<0.7.0"
[[package]]
name = "pycparser"
-version = "2.21"
+version = "2.22"
description = "C parser in Python"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
- {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
+ {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
]
[[package]]
@@ -1569,6 +1904,8 @@ version = "2.7.4"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"},
{file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"},
@@ -1588,6 +1925,8 @@ version = "2.18.4"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"},
{file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"},
@@ -1675,13 +2014,15 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pydantic-settings"
-version = "2.3.3"
+version = "2.7.1"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "pydantic_settings-2.3.3-py3-none-any.whl", hash = "sha256:e4ed62ad851670975ec11285141db888fd24947f9440bd4380d7d8788d4965de"},
- {file = "pydantic_settings-2.3.3.tar.gz", hash = "sha256:87fda838b64b5039b970cd47c3e8a1ee460ce136278ff672980af21516f6e6ce"},
+ {file = "pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd"},
+ {file = "pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93"},
]
[package.dependencies]
@@ -1689,22 +2030,25 @@ pydantic = ">=2.7.0"
python-dotenv = ">=0.21.0"
[package.extras]
+azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"]
toml = ["tomli (>=2.0.1)"]
yaml = ["pyyaml (>=6.0.1)"]
[[package]]
name = "pygments"
-version = "2.16.1"
+version = "2.19.1"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"},
- {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"},
+ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"},
+ {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"},
]
[package.extras]
-plugins = ["importlib-metadata"]
+windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pymysql"
@@ -1712,6 +2056,8 @@ version = "1.1.1"
description = "Pure Python MySQL Driver"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"},
{file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"},
@@ -1727,6 +2073,8 @@ version = "1.5.0"
description = "Python binding to the Networking and Cryptography (NaCl) library"
optional = false
python-versions = ">=3.6"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"},
{file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"},
@@ -1749,24 +2097,31 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
[[package]]
name = "pyreadline3"
-version = "3.4.1"
+version = "3.5.4"
description = "A python implementation of GNU readline."
optional = false
-python-versions = "*"
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "sys_platform == \"win32\" and (python_version <= \"3.11\" or python_version >= \"3.12\")"
files = [
- {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"},
- {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
+ {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"},
+ {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"},
]
+[package.extras]
+dev = ["build", "flake8", "mypy", "pytest", "twine"]
+
[[package]]
name = "pytest"
-version = "7.4.2"
+version = "8.3.4"
description = "pytest: simple powerful testing with Python"
-optional = false
-python-versions = ">=3.7"
+optional = true
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "(extra == \"pytest-cov\" or extra == \"pytest-mock\" or extra == \"pytest\" or extra == \"pytest-xdist\") and (python_version <= \"3.11\" or python_version >= \"3.12\")"
files = [
- {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"},
- {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"},
+ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"},
+ {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"},
]
[package.dependencies]
@@ -1774,61 +2129,67 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
-pluggy = ">=0.12,<2.0"
-tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+pluggy = ">=1.5,<2"
+tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
-testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-cov"
-version = "4.1.0"
+version = "6.0.0"
description = "Pytest plugin for measuring coverage."
-optional = false
-python-versions = ">=3.7"
+optional = true
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pytest-cov\""
files = [
- {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
- {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
+ {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"},
+ {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"},
]
[package.dependencies]
-coverage = {version = ">=5.2.1", extras = ["toml"]}
+coverage = {version = ">=7.5", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
-testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
[[package]]
name = "pytest-mock"
-version = "3.11.1"
+version = "3.14.0"
description = "Thin-wrapper around the mock package for easier use with pytest"
-optional = false
-python-versions = ">=3.7"
+optional = true
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pytest-mock\""
files = [
- {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"},
- {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"},
+ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
+ {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
]
[package.dependencies]
-pytest = ">=5.0"
+pytest = ">=6.2.5"
[package.extras]
dev = ["pre-commit", "pytest-asyncio", "tox"]
[[package]]
name = "pytest-xdist"
-version = "3.5.0"
+version = "3.6.1"
description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
-optional = false
-python-versions = ">=3.7"
+optional = true
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pytest-xdist\""
files = [
- {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"},
- {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"},
+ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"},
+ {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"},
]
[package.dependencies]
-execnet = ">=1.1"
-pytest = ">=6.2.0"
+execnet = ">=2.1"
+pytest = ">=7.0.0"
[package.extras]
psutil = ["psutil (>=3.0)"]
@@ -1837,13 +2198,15 @@ testing = ["filelock"]
[[package]]
name = "python-dateutil"
-version = "2.8.2"
+version = "2.9.0.post0"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
- {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+ {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
]
[package.dependencies]
@@ -1855,6 +2218,8 @@ version = "1.0.1"
description = "Read key-value pairs from a .env file and set them as environment variables"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
@@ -1865,83 +2230,92 @@ cli = ["click (>=5.0)"]
[[package]]
name = "pytz"
-version = "2023.3.post1"
+version = "2024.2"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"},
- {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"},
+ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
+ {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
]
[[package]]
name = "pyyaml"
-version = "6.0.1"
+version = "6.0.2"
description = "YAML parser and emitter for Python"
optional = false
-python-versions = ">=3.6"
-files = [
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
- {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
- {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
- {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
- {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
- {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
- {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
- {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
- {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
- {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
- {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
- {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
- {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
- {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
- {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
- {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
- {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
- {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
- {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
- {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
- {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
- {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
- {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
- {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
- {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
- {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
+ {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
+ {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
+ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]
[[package]]
name = "requests"
-version = "2.32.2"
+version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"},
- {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"},
+ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
+ {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
]
[package.dependencies]
@@ -1956,13 +2330,15 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-oauthlib"
-version = "1.3.1"
+version = "2.0.0"
description = "OAuthlib authentication support for Requests."
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.4"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"},
- {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"},
+ {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"},
+ {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"},
]
[package.dependencies]
@@ -1974,28 +2350,55 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
[[package]]
name = "rich"
-version = "13.6.0"
+version = "13.9.4"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
optional = false
-python-versions = ">=3.7.0"
+python-versions = ">=3.8.0"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"},
- {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"},
+ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"},
+ {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"},
]
[package.dependencies]
markdown-it-py = ">=2.2.0"
pygments = ">=2.13.0,<3.0.0"
+typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""}
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
+[[package]]
+name = "rich-click"
+version = "1.8.5"
+description = "Format click help output nicely with rich"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "rich_click-1.8.5-py3-none-any.whl", hash = "sha256:0fab7bb5b66c15da17c210b4104277cd45f3653a7322e0098820a169880baee0"},
+ {file = "rich_click-1.8.5.tar.gz", hash = "sha256:a3eebe81da1c9da3c32f3810017c79bd687ff1b3fa35bfc9d8a3338797f1d1a1"},
+]
+
+[package.dependencies]
+click = ">=7"
+rich = ">=10.7"
+typing_extensions = ">=4"
+
+[package.extras]
+dev = ["mypy", "packaging", "pre-commit", "pytest", "pytest-cov", "rich-codex", "ruff", "types-setuptools"]
+docs = ["markdown_include", "mkdocs", "mkdocs-glightbox", "mkdocs-material-extensions", "mkdocs-material[imaging] (>=9.5.18,<9.6.0)", "mkdocs-rss-plugin", "mkdocstrings[python]", "rich-codex"]
+
[[package]]
name = "rsa"
version = "4.9"
description = "Pure-Python RSA implementation"
optional = false
python-versions = ">=3.6,<4"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
{file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
@@ -2006,131 +2409,132 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
-version = "0.0.292"
-description = "An extremely fast Python linter, written in Rust."
-optional = false
+version = "0.9.1"
+description = "An extremely fast Python linter and code formatter, written in Rust."
+optional = true
python-versions = ">=3.7"
-files = [
- {file = "ruff-0.0.292-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:02f29db018c9d474270c704e6c6b13b18ed0ecac82761e4fcf0faa3728430c96"},
- {file = "ruff-0.0.292-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69654e564342f507edfa09ee6897883ca76e331d4bbc3676d8a8403838e9fade"},
- {file = "ruff-0.0.292-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c3c91859a9b845c33778f11902e7b26440d64b9d5110edd4e4fa1726c41e0a4"},
- {file = "ruff-0.0.292-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4476f1243af2d8c29da5f235c13dca52177117935e1f9393f9d90f9833f69e4"},
- {file = "ruff-0.0.292-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8eb50eaf8648070b8e58ece8e69c9322d34afe367eec4210fdee9a555e4ca7"},
- {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9889bac18a0c07018aac75ef6c1e6511d8411724d67cb879103b01758e110a81"},
- {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdfabd4334684a4418b99b3118793f2c13bb67bf1540a769d7816410402a205"},
- {file = "ruff-0.0.292-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7c77c53bfcd75dbcd4d1f42d6cabf2485d2e1ee0678da850f08e1ab13081a8"},
- {file = "ruff-0.0.292-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e087b24d0d849c5c81516ec740bf4fd48bf363cfb104545464e0fca749b6af9"},
- {file = "ruff-0.0.292-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f160b5ec26be32362d0774964e218f3fcf0a7da299f7e220ef45ae9e3e67101a"},
- {file = "ruff-0.0.292-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ac153eee6dd4444501c4bb92bff866491d4bfb01ce26dd2fff7ca472c8df9ad0"},
- {file = "ruff-0.0.292-py3-none-musllinux_1_2_i686.whl", hash = "sha256:87616771e72820800b8faea82edd858324b29bb99a920d6aa3d3949dd3f88fb0"},
- {file = "ruff-0.0.292-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b76deb3bdbea2ef97db286cf953488745dd6424c122d275f05836c53f62d4016"},
- {file = "ruff-0.0.292-py3-none-win32.whl", hash = "sha256:e854b05408f7a8033a027e4b1c7f9889563dd2aca545d13d06711e5c39c3d003"},
- {file = "ruff-0.0.292-py3-none-win_amd64.whl", hash = "sha256:f27282bedfd04d4c3492e5c3398360c9d86a295be00eccc63914438b4ac8a83c"},
- {file = "ruff-0.0.292-py3-none-win_arm64.whl", hash = "sha256:7f67a69c8f12fbc8daf6ae6d36705037bde315abf8b82b6e1f4c9e74eb750f68"},
- {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"},
-]
-
-[[package]]
-name = "setuptools"
-version = "70.3.0"
-description = "Easily download, build, install, upgrade, and uninstall Python packages"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"},
- {file = "setuptools-70.3.0.tar.gz", hash = "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"},
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ruff\""
+files = [
+ {file = "ruff-0.9.1-py3-none-linux_armv6l.whl", hash = "sha256:84330dda7abcc270e6055551aca93fdde1b0685fc4fd358f26410f9349cf1743"},
+ {file = "ruff-0.9.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3cae39ba5d137054b0e5b472aee3b78a7c884e61591b100aeb544bcd1fc38d4f"},
+ {file = "ruff-0.9.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:50c647ff96f4ba288db0ad87048257753733763b409b2faf2ea78b45c8bb7fcb"},
+ {file = "ruff-0.9.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0c8b149e9c7353cace7d698e1656ffcf1e36e50f8ea3b5d5f7f87ff9986a7ca"},
+ {file = "ruff-0.9.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:beb3298604540c884d8b282fe7625651378e1986c25df51dec5b2f60cafc31ce"},
+ {file = "ruff-0.9.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39d0174ccc45c439093971cc06ed3ac4dc545f5e8bdacf9f067adf879544d969"},
+ {file = "ruff-0.9.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:69572926c0f0c9912288915214ca9b2809525ea263603370b9e00bed2ba56dbd"},
+ {file = "ruff-0.9.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:937267afce0c9170d6d29f01fcd1f4378172dec6760a9f4dface48cdabf9610a"},
+ {file = "ruff-0.9.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:186c2313de946f2c22bdf5954b8dd083e124bcfb685732cfb0beae0c47233d9b"},
+ {file = "ruff-0.9.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f94942a3bb767675d9a051867c036655fe9f6c8a491539156a6f7e6b5f31831"},
+ {file = "ruff-0.9.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:728d791b769cc28c05f12c280f99e8896932e9833fef1dd8756a6af2261fd1ab"},
+ {file = "ruff-0.9.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2f312c86fb40c5c02b44a29a750ee3b21002bd813b5233facdaf63a51d9a85e1"},
+ {file = "ruff-0.9.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ae017c3a29bee341ba584f3823f805abbe5fe9cd97f87ed07ecbf533c4c88366"},
+ {file = "ruff-0.9.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5dc40a378a0e21b4cfe2b8a0f1812a6572fc7b230ef12cd9fac9161aa91d807f"},
+ {file = "ruff-0.9.1-py3-none-win32.whl", hash = "sha256:46ebf5cc106cf7e7378ca3c28ce4293b61b449cd121b98699be727d40b79ba72"},
+ {file = "ruff-0.9.1-py3-none-win_amd64.whl", hash = "sha256:342a824b46ddbcdddd3abfbb332fa7fcaac5488bf18073e841236aadf4ad5c19"},
+ {file = "ruff-0.9.1-py3-none-win_arm64.whl", hash = "sha256:1cd76c7f9c679e6e8f2af8f778367dca82b95009bc7b1a85a47f1521ae524fa7"},
+ {file = "ruff-0.9.1.tar.gz", hash = "sha256:fd2b25ecaf907d6458fa842675382c8597b3c746a2dde6717fe3415425df0c17"},
]
-[package.extras]
-doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
-
[[package]]
name = "six"
-version = "1.16.0"
+version = "1.17.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
- {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
+ {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
]
[[package]]
name = "sqlalchemy"
-version = "2.0.22"
+version = "2.0.37"
description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
-files = [
- {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f146c61ae128ab43ea3a0955de1af7e1633942c2b2b4985ac51cc292daf33222"},
- {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:875de9414393e778b655a3d97d60465eb3fae7c919e88b70cc10b40b9f56042d"},
- {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13790cb42f917c45c9c850b39b9941539ca8ee7917dacf099cc0b569f3d40da7"},
- {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e04ab55cf49daf1aeb8c622c54d23fa4bec91cb051a43cc24351ba97e1dd09f5"},
- {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a42c9fa3abcda0dcfad053e49c4f752eef71ecd8c155221e18b99d4224621176"},
- {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14cd3bcbb853379fef2cd01e7c64a5d6f1d005406d877ed9509afb7a05ff40a5"},
- {file = "SQLAlchemy-2.0.22-cp310-cp310-win32.whl", hash = "sha256:d143c5a9dada696bcfdb96ba2de4a47d5a89168e71d05a076e88a01386872f97"},
- {file = "SQLAlchemy-2.0.22-cp310-cp310-win_amd64.whl", hash = "sha256:ccd87c25e4c8559e1b918d46b4fa90b37f459c9b4566f1dfbce0eb8122571547"},
- {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f6ff392b27a743c1ad346d215655503cec64405d3b694228b3454878bf21590"},
- {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f776c2c30f0e5f4db45c3ee11a5f2a8d9de68e81eb73ec4237de1e32e04ae81c"},
- {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8f1792d20d2f4e875ce7a113f43c3561ad12b34ff796b84002a256f37ce9437"},
- {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80eeb5189d7d4b1af519fc3f148fe7521b9dfce8f4d6a0820e8f5769b005051"},
- {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69fd9e41cf9368afa034e1c81f3570afb96f30fcd2eb1ef29cb4d9371c6eece2"},
- {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54bcceaf4eebef07dadfde424f5c26b491e4a64e61761dea9459103ecd6ccc95"},
- {file = "SQLAlchemy-2.0.22-cp311-cp311-win32.whl", hash = "sha256:7ee7ccf47aa503033b6afd57efbac6b9e05180f492aeed9fcf70752556f95624"},
- {file = "SQLAlchemy-2.0.22-cp311-cp311-win_amd64.whl", hash = "sha256:b560f075c151900587ade06706b0c51d04b3277c111151997ea0813455378ae0"},
- {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2c9bac865ee06d27a1533471405ad240a6f5d83195eca481f9fc4a71d8b87df8"},
- {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:625b72d77ac8ac23da3b1622e2da88c4aedaee14df47c8432bf8f6495e655de2"},
- {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39a6e21110204a8c08d40ff56a73ba542ec60bab701c36ce721e7990df49fb9"},
- {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53a766cb0b468223cafdf63e2d37f14a4757476157927b09300c8c5832d88560"},
- {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0e1ce8ebd2e040357dde01a3fb7d30d9b5736b3e54a94002641dfd0aa12ae6ce"},
- {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:505f503763a767556fa4deae5194b2be056b64ecca72ac65224381a0acab7ebe"},
- {file = "SQLAlchemy-2.0.22-cp312-cp312-win32.whl", hash = "sha256:154a32f3c7b00de3d090bc60ec8006a78149e221f1182e3edcf0376016be9396"},
- {file = "SQLAlchemy-2.0.22-cp312-cp312-win_amd64.whl", hash = "sha256:129415f89744b05741c6f0b04a84525f37fbabe5dc3774f7edf100e7458c48cd"},
- {file = "SQLAlchemy-2.0.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3940677d341f2b685a999bffe7078697b5848a40b5f6952794ffcf3af150c301"},
- {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55914d45a631b81a8a2cb1a54f03eea265cf1783241ac55396ec6d735be14883"},
- {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2096d6b018d242a2bcc9e451618166f860bb0304f590d205173d317b69986c95"},
- {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:19c6986cf2fb4bc8e0e846f97f4135a8e753b57d2aaaa87c50f9acbe606bd1db"},
- {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ac28bd6888fe3c81fbe97584eb0b96804bd7032d6100b9701255d9441373ec1"},
- {file = "SQLAlchemy-2.0.22-cp37-cp37m-win32.whl", hash = "sha256:cb9a758ad973e795267da334a92dd82bb7555cb36a0960dcabcf724d26299db8"},
- {file = "SQLAlchemy-2.0.22-cp37-cp37m-win_amd64.whl", hash = "sha256:40b1206a0d923e73aa54f0a6bd61419a96b914f1cd19900b6c8226899d9742ad"},
- {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3aa1472bf44f61dd27987cd051f1c893b7d3b17238bff8c23fceaef4f1133868"},
- {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:56a7e2bb639df9263bf6418231bc2a92a773f57886d371ddb7a869a24919face"},
- {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccca778c0737a773a1ad86b68bda52a71ad5950b25e120b6eb1330f0df54c3d0"},
- {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6c3e9350f9fb16de5b5e5fbf17b578811a52d71bb784cc5ff71acb7de2a7f9"},
- {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:564e9f9e4e6466273dbfab0e0a2e5fe819eec480c57b53a2cdee8e4fdae3ad5f"},
- {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:af66001d7b76a3fab0d5e4c1ec9339ac45748bc4a399cbc2baa48c1980d3c1f4"},
- {file = "SQLAlchemy-2.0.22-cp38-cp38-win32.whl", hash = "sha256:9e55dff5ec115316dd7a083cdc1a52de63693695aecf72bc53a8e1468ce429e5"},
- {file = "SQLAlchemy-2.0.22-cp38-cp38-win_amd64.whl", hash = "sha256:4e869a8ff7ee7a833b74868a0887e8462445ec462432d8cbeff5e85f475186da"},
- {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9886a72c8e6371280cb247c5d32c9c8fa141dc560124348762db8a8b236f8692"},
- {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a571bc8ac092a3175a1d994794a8e7a1f2f651e7c744de24a19b4f740fe95034"},
- {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db5ba8b7da759b727faebc4289a9e6a51edadc7fc32207a30f7c6203a181592"},
- {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0b3f2686c3f162123adba3cb8b626ed7e9b8433ab528e36ed270b4f70d1cdb"},
- {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c1fea8c0abcb070ffe15311853abfda4e55bf7dc1d4889497b3403629f3bf00"},
- {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4bb062784f37b2d75fd9b074c8ec360ad5df71f933f927e9e95c50eb8e05323c"},
- {file = "SQLAlchemy-2.0.22-cp39-cp39-win32.whl", hash = "sha256:58a3aba1bfb32ae7af68da3f277ed91d9f57620cf7ce651db96636790a78b736"},
- {file = "SQLAlchemy-2.0.22-cp39-cp39-win_amd64.whl", hash = "sha256:92e512a6af769e4725fa5b25981ba790335d42c5977e94ded07db7d641490a85"},
- {file = "SQLAlchemy-2.0.22-py3-none-any.whl", hash = "sha256:3076740335e4aaadd7deb3fe6dcb96b3015f1613bd190a4e1634e1b99b02ec86"},
- {file = "SQLAlchemy-2.0.22.tar.gz", hash = "sha256:5434cc601aa17570d79e5377f5fd45ff92f9379e2abed0be5e8c2fba8d353d2b"},
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+ {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da36c3b0e891808a7542c5c89f224520b9a16c7f5e4d6a1156955605e54aef0e"},
+ {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7402ff96e2b073a98ef6d6142796426d705addd27b9d26c3b32dbaa06d7d069"},
+ {file = "SQLAlchemy-2.0.37-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6f5d254a22394847245f411a2956976401e84da4288aa70cbcd5190744062c1"},
+ {file = "SQLAlchemy-2.0.37-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41296bbcaa55ef5fdd32389a35c710133b097f7b2609d8218c0eabded43a1d84"},
+ {file = "SQLAlchemy-2.0.37-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bedee60385c1c0411378cbd4dc486362f5ee88deceea50002772912d798bb00f"},
+ {file = "SQLAlchemy-2.0.37-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6c67415258f9f3c69867ec02fea1bf6508153709ecbd731a982442a590f2b7e4"},
+ {file = "SQLAlchemy-2.0.37-cp310-cp310-win32.whl", hash = "sha256:650dcb70739957a492ad8acff65d099a9586b9b8920e3507ca61ec3ce650bb72"},
+ {file = "SQLAlchemy-2.0.37-cp310-cp310-win_amd64.whl", hash = "sha256:93d1543cd8359040c02b6614421c8e10cd7a788c40047dbc507ed46c29ae5636"},
+ {file = "SQLAlchemy-2.0.37-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:78361be6dc9073ed17ab380985d1e45e48a642313ab68ab6afa2457354ff692c"},
+ {file = "SQLAlchemy-2.0.37-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b661b49d0cb0ab311a189b31e25576b7ac3e20783beb1e1817d72d9d02508bf5"},
+ {file = "SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d57bafbab289e147d064ffbd5cca2d7b1394b63417c0636cea1f2e93d16eb9e8"},
+ {file = "SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa2c0913f02341d25fb858e4fb2031e6b0813494cca1ba07d417674128ce11b"},
+ {file = "SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9df21b8d9e5c136ea6cde1c50d2b1c29a2b5ff2b1d610165c23ff250e0704087"},
+ {file = "SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db18ff6b8c0f1917f8b20f8eca35c28bbccb9f83afa94743e03d40203ed83de9"},
+ {file = "SQLAlchemy-2.0.37-cp311-cp311-win32.whl", hash = "sha256:46954173612617a99a64aee103bcd3f078901b9a8dcfc6ae80cbf34ba23df989"},
+ {file = "SQLAlchemy-2.0.37-cp311-cp311-win_amd64.whl", hash = "sha256:7b7e772dc4bc507fdec4ee20182f15bd60d2a84f1e087a8accf5b5b7a0dcf2ba"},
+ {file = "SQLAlchemy-2.0.37-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2952748ecd67ed3b56773c185e85fc084f6bdcdec10e5032a7c25a6bc7d682ef"},
+ {file = "SQLAlchemy-2.0.37-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3151822aa1db0eb5afd65ccfafebe0ef5cda3a7701a279c8d0bf17781a793bb4"},
+ {file = "SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaa8039b6d20137a4e02603aba37d12cd2dde7887500b8855356682fc33933f4"},
+ {file = "SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cdba1f73b64530c47b27118b7053b8447e6d6f3c8104e3ac59f3d40c33aa9fd"},
+ {file = "SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1b2690456528a87234a75d1a1644cdb330a6926f455403c8e4f6cad6921f9098"},
+ {file = "SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf5ae8a9dcf657fd72144a7fd01f243236ea39e7344e579a121c4205aedf07bb"},
+ {file = "SQLAlchemy-2.0.37-cp312-cp312-win32.whl", hash = "sha256:ea308cec940905ba008291d93619d92edaf83232ec85fbd514dcb329f3192761"},
+ {file = "SQLAlchemy-2.0.37-cp312-cp312-win_amd64.whl", hash = "sha256:635d8a21577341dfe4f7fa59ec394b346da12420b86624a69e466d446de16aff"},
+ {file = "SQLAlchemy-2.0.37-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8c4096727193762e72ce9437e2a86a110cf081241919ce3fab8e89c02f6b6658"},
+ {file = "SQLAlchemy-2.0.37-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e4fb5ac86d8fe8151966814f6720996430462e633d225497566b3996966b9bdb"},
+ {file = "SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e56a139bfe136a22c438478a86f8204c1eb5eed36f4e15c4224e4b9db01cb3e4"},
+ {file = "SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f95fc8e3f34b5f6b3effb49d10ac97c569ec8e32f985612d9b25dd12d0d2e94"},
+ {file = "SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c505edd429abdfe3643fa3b2e83efb3445a34a9dc49d5f692dd087be966020e0"},
+ {file = "SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:12b0f1ec623cccf058cf21cb544f0e74656618165b083d78145cafde156ea7b6"},
+ {file = "SQLAlchemy-2.0.37-cp313-cp313-win32.whl", hash = "sha256:293f9ade06b2e68dd03cfb14d49202fac47b7bb94bffcff174568c951fbc7af2"},
+ {file = "SQLAlchemy-2.0.37-cp313-cp313-win_amd64.whl", hash = "sha256:d70f53a0646cc418ca4853da57cf3ddddbccb8c98406791f24426f2dd77fd0e2"},
+ {file = "SQLAlchemy-2.0.37-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:44f569d0b1eb82301b92b72085583277316e7367e038d97c3a1a899d9a05e342"},
+ {file = "SQLAlchemy-2.0.37-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2eae3423e538c10d93ae3e87788c6a84658c3ed6db62e6a61bb9495b0ad16bb"},
+ {file = "SQLAlchemy-2.0.37-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfff7be361048244c3aa0f60b5e63221c5e0f0e509f4e47b8910e22b57d10ae7"},
+ {file = "SQLAlchemy-2.0.37-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:5bc3339db84c5fb9130ac0e2f20347ee77b5dd2596ba327ce0d399752f4fce39"},
+ {file = "SQLAlchemy-2.0.37-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:84b9f23b0fa98a6a4b99d73989350a94e4a4ec476b9a7dfe9b79ba5939f5e80b"},
+ {file = "SQLAlchemy-2.0.37-cp37-cp37m-win32.whl", hash = "sha256:51bc9cfef83e0ac84f86bf2b10eaccb27c5a3e66a1212bef676f5bee6ef33ebb"},
+ {file = "SQLAlchemy-2.0.37-cp37-cp37m-win_amd64.whl", hash = "sha256:8e47f1af09444f87c67b4f1bb6231e12ba6d4d9f03050d7fc88df6d075231a49"},
+ {file = "SQLAlchemy-2.0.37-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6b788f14c5bb91db7f468dcf76f8b64423660a05e57fe277d3f4fad7b9dcb7ce"},
+ {file = "SQLAlchemy-2.0.37-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521ef85c04c33009166777c77e76c8a676e2d8528dc83a57836b63ca9c69dcd1"},
+ {file = "SQLAlchemy-2.0.37-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75311559f5c9881a9808eadbeb20ed8d8ba3f7225bef3afed2000c2a9f4d49b9"},
+ {file = "SQLAlchemy-2.0.37-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cce918ada64c956b62ca2c2af59b125767097ec1dca89650a6221e887521bfd7"},
+ {file = "SQLAlchemy-2.0.37-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9d087663b7e1feabea8c578d6887d59bb00388158e8bff3a76be11aa3f748ca2"},
+ {file = "SQLAlchemy-2.0.37-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cf95a60b36997dad99692314c4713f141b61c5b0b4cc5c3426faad570b31ca01"},
+ {file = "SQLAlchemy-2.0.37-cp38-cp38-win32.whl", hash = "sha256:d75ead7dd4d255068ea0f21492ee67937bd7c90964c8f3c2bea83c7b7f81b95f"},
+ {file = "SQLAlchemy-2.0.37-cp38-cp38-win_amd64.whl", hash = "sha256:74bbd1d0a9bacf34266a7907d43260c8d65d31d691bb2356f41b17c2dca5b1d0"},
+ {file = "SQLAlchemy-2.0.37-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:648ec5acf95ad59255452ef759054f2176849662af4521db6cb245263ae4aa33"},
+ {file = "SQLAlchemy-2.0.37-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:35bd2df269de082065d4b23ae08502a47255832cc3f17619a5cea92ce478b02b"},
+ {file = "SQLAlchemy-2.0.37-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f581d365af9373a738c49e0c51e8b18e08d8a6b1b15cc556773bcd8a192fa8b"},
+ {file = "SQLAlchemy-2.0.37-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82df02816c14f8dc9f4d74aea4cb84a92f4b0620235daa76dde002409a3fbb5a"},
+ {file = "SQLAlchemy-2.0.37-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94b564e38b344d3e67d2e224f0aec6ba09a77e4582ced41e7bfd0f757d926ec9"},
+ {file = "SQLAlchemy-2.0.37-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:955a2a765aa1bd81aafa69ffda179d4fe3e2a3ad462a736ae5b6f387f78bfeb8"},
+ {file = "SQLAlchemy-2.0.37-cp39-cp39-win32.whl", hash = "sha256:03f0528c53ca0b67094c4764523c1451ea15959bbf0a8a8a3096900014db0278"},
+ {file = "SQLAlchemy-2.0.37-cp39-cp39-win_amd64.whl", hash = "sha256:4b12885dc85a2ab2b7d00995bac6d967bffa8594123b02ed21e8eb2205a7584b"},
+ {file = "SQLAlchemy-2.0.37-py3-none-any.whl", hash = "sha256:a8998bf9f8658bd3839cbc44ddbe982955641863da0c1efe5b00c1ab4f5c16b1"},
+ {file = "sqlalchemy-2.0.37.tar.gz", hash = "sha256:12b28d99a9c14eaf4055810df1001557176716de0167b91026e648e65229bffb"},
]
[package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
-typing-extensions = ">=4.2.0"
+greenlet = {version = "!=0.4.17", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
+typing-extensions = ">=4.6.0"
[package.extras]
aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
-aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
+aioodbc = ["aioodbc", "greenlet (!=0.4.17)"]
+aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"]
mssql = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)"]
mysql = ["mysqlclient (>=1.4.0)"]
mysql-connector = ["mysql-connector-python"]
-oracle = ["cx-oracle (>=7)"]
+oracle = ["cx_oracle (>=8)"]
oracle-oracledb = ["oracledb (>=1.0.1)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
@@ -2140,7 +2544,7 @@ postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
pymysql = ["pymysql"]
-sqlcipher = ["sqlcipher3-binary"]
+sqlcipher = ["sqlcipher3_binary"]
[[package]]
name = "tabulate"
@@ -2148,6 +2552,8 @@ version = "0.9.0"
description = "Pretty-print tabular data"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
{file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
@@ -2158,46 +2564,84 @@ widechars = ["wcwidth"]
[[package]]
name = "tomli"
-version = "2.0.1"
+version = "2.2.1"
description = "A lil' TOML parser"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
- {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+optional = true
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "(extra == \"coveralls\" or extra == \"pytest-cov\" or extra == \"pytest-mock\" or extra == \"pytest\" or extra == \"pytest-xdist\") and python_version < \"3.11\""
+files = [
+ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
+ {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"},
+ {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"},
+ {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"},
+ {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"},
+ {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"},
+ {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"},
+ {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"},
+ {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"},
+ {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"},
+ {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"},
+ {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"},
+ {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"},
+ {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"},
+ {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"},
+ {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"},
+ {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"},
+ {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"},
+ {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"},
+ {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"},
+ {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"},
+ {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"},
+ {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"},
+ {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"},
+ {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"},
+ {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"},
+ {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"},
+ {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"},
+ {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"},
+ {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"},
+ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"},
+ {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"},
]
[[package]]
name = "typing-extensions"
-version = "4.8.0"
+version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"},
- {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
+ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[[package]]
name = "tzdata"
-version = "2023.3"
+version = "2024.2"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"},
- {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"},
+ {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"},
+ {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"},
]
[[package]]
name = "urllib3"
-version = "2.2.2"
+version = "2.3.0"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
- {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
+ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},
+ {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},
]
[package.extras]
@@ -2212,28 +2656,32 @@ version = "2.4.3"
description = "A utility class for manipulating URLs."
optional = false
python-versions = "*"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "URLObject-2.4.3.tar.gz", hash = "sha256:47b2e20e6ab9c8366b2f4a3566b6ff4053025dad311c4bb71279bbcfa2430caa"},
]
[[package]]
name = "virtualenv"
-version = "20.24.6"
+version = "20.28.1"
description = "Virtual Python Environment builder"
-optional = false
-python-versions = ">=3.7"
+optional = true
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"pre-commit\""
files = [
- {file = "virtualenv-20.24.6-py3-none-any.whl", hash = "sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381"},
- {file = "virtualenv-20.24.6.tar.gz", hash = "sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af"},
+ {file = "virtualenv-20.28.1-py3-none-any.whl", hash = "sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb"},
+ {file = "virtualenv-20.28.1.tar.gz", hash = "sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329"},
]
[package.dependencies]
distlib = ">=0.3.7,<1"
filelock = ">=3.12.2,<4"
-platformdirs = ">=3.9.1,<4"
+platformdirs = ">=3.9.1,<5"
[package.extras]
-docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
[[package]]
@@ -2242,6 +2690,8 @@ version = "3.1.3"
description = "The comprehensive WSGI web application library."
optional = false
python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"},
{file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"},
@@ -2259,6 +2709,8 @@ version = "3.0.0"
description = "Form validation and rendering for Python web development."
optional = false
python-versions = ">=3.6"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
files = [
{file = "WTForms-3.0.0-py3-none-any.whl", hash = "sha256:232dbb0094847dca2f45c72136b5ca1d5dca2a3e24ccd2229823b8b74b3c6698"},
{file = "WTForms-3.0.0.tar.gz", hash = "sha256:4abfbaa1d529a1d0ac927d44af8dbb9833afd910e56448a103f1893b0b176886"},
@@ -2272,20 +2724,36 @@ email = ["email-validator"]
[[package]]
name = "zipp"
-version = "3.19.2"
+version = "3.21.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version < \"3.10\""
files = [
- {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"},
- {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"},
+ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
+ {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
]
[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
+type = ["pytest-mypy"]
+
+[extras]
+coveralls = ["coveralls"]
+mock = ["mock"]
+pre-commit = ["pre-commit"]
+pytest = ["pytest"]
+pytest-cov = ["pytest-cov"]
+pytest-mock = ["pytest-mock"]
+pytest-xdist = ["pytest-xdist"]
+ruff = ["ruff"]
[metadata]
-lock-version = "2.0"
+lock-version = "2.1"
python-versions = ">=3.9,<3.13"
-content-hash = "7849ceb358ed7655482144407fa2b0fc2a11091906b696c957b283480415e9a1"
+content-hash = "5fe4fc06f95ac9872370b1a56a363a59902ba5eb3e28dd99885bfbca1780fd7e"
diff --git a/pyproject.toml b/pyproject.toml
index 2310eed895..014eabf8c2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -2,12 +2,11 @@
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
-[tool.poetry]
+[project]
name = "cg"
-version = "64.5.14"
+version = "67.0.3"
description = "Clinical Genomics command center"
-authors = ["Clinical Genomics "]
-readme = "README.md"
+readme = {file = "README.md", content-type = "text/markdown"}
homepage = "https://github.com/Clinical-Genomics/cg"
repository = "https://github.com/Clinical-Genomics/cg"
classifiers = [
@@ -20,71 +19,62 @@ include = [
"cg/**/*",
"tests/**/*"
]
+requires-python = ">=3.9,<3.13"
+dependencies = [
+    "alembic",
+    "SQLAlchemy",
+    "PyMySQL",
+    "click",
+    "blinker",
+    "CacheControl",
+    "Flask-Admin",
+    "Flask-CORS",
+    "Flask-Dance",
+    "Flask",
+    "Flask-WTF",
+    "WTForms == 3.0.0",
+    "google-auth",
+    "gunicorn",
+    "requests",
+    "werkzeug",
+    "cachetools",
+    "cryptography",
+    "coloredlogs",
+    "Jinja2",
+    "lxml",
+    "marshmallow",
+    "MarkupSafe",
+    "openpyxl",
+    "packaging",
+    "pandas",
+    "paramiko",
+    "petname",
+    "psutil",
+    "pydantic == 2.7.4",
+    "python-dateutil",
+    "PyYAML",
+    "tabulate",
+    "typing_extensions",
+    "urllib3",
+    "genologics",
+    "housekeeper>=4.11.3",
+    "pydantic-settings>=2.3.3",
+    "email-validator>=2.2.0",
+    "rich-click>=1.8.4",
+]
+
+[project.optional-dependencies]
+coveralls = ["coveralls"]
+mock = ["mock"]
+pre-commit = ["pre-commit"]
+pytest-cov = ["pytest-cov"]
+pytest-mock = ["pytest-mock"]
+pytest = ["pytest"]
+ruff = ["ruff"]
+pytest-xdist = ["pytest-xdist"]
-[tool.poetry.dependencies]
-python = ">=3.9,<3.13"
-# Database
-alembic = "*"
-SQLAlchemy = "*"
-PyMySQL = "*"
-# CLI
-click = "*"
-
-# Server
-blinker = "*"
-CacheControl = "*"
-Flask-Admin = "*"
-Flask-CORS = "*"
-Flask-Dance = "*"
-Flask = "*"
-Flask-WTF = "*"
-WTForms = "*"
-google-auth = "*"
-gunicorn = "*"
-requests = "*"
-werkzeug = "*"
-
-# Utils
-cachetools = "*"
-cryptography = "*"
-coloredlogs = "*"
-Jinja2 = ">=3.1.3"
-lxml = "*"
-marshmallow = "*"
-MarkupSafe = "*"
-openpyxl = "*"
-packaging = "*"
-pandas = "*"
-paramiko = "*"
-petname = "*"
-psutil = "*"
-pydantic = "*"
-python-dateutil = "*"
-PyYAML = "*"
-tabulate = "*"
-typing_extensions = "*"
-urllib3 = "*"
-
-# Apps
-genologics = "*"
-housekeeper = ">=4.11.3"
-pydantic-settings = "^2.3.3"
-email-validator = "^2.2.0"
-
-
-[tool.poetry.dev-dependencies]
-coveralls = "*"
-mock = "*"
-pre-commit = "*"
-pytest-cov = "*"
-pytest-mock = "*"
-pytest = "*"
-ruff = "*"
-pytest-xdist = "*"
-
-[tool.poetry.scripts]
+[project.scripts]
cg = "cg.cli.base:base"
diff --git a/tests/apps/hk/test__getattr__.py b/tests/apps/hk/test__getattr__.py
deleted file mode 100644
index 5a0a81ee79..0000000000
--- a/tests/apps/hk/test__getattr__.py
+++ /dev/null
@@ -1,20 +0,0 @@
-""" Test the __getattr__ override when calling private _store."""
-
-import logging
-
-from tests.mocks.hk_mock import MockHousekeeperAPI
-
-
-def test_calling_method_on_private_store_give_warning(housekeeper_api: MockHousekeeperAPI, caplog):
- """Test that we get a log warning for unwrapped methods."""
-
- # GIVEN an hk api and a method that is not wrapped
- caplog.set_level(logging.WARNING)
-
- # WHEN we call add_file
- housekeeper_api.get_files_before()
-
- # THEN the log should contain a warning that we have called something non-wrapped
- with caplog.at_level(logging.WARNING):
- assert "files_before" in caplog.text
- assert "HousekeeperAPI" in caplog.text
diff --git a/tests/apps/orderform/test_excel_orderform_parser.py b/tests/apps/orderform/test_excel_orderform_parser.py
index 348c889828..ad54a5ff5e 100644
--- a/tests/apps/orderform/test_excel_orderform_parser.py
+++ b/tests/apps/orderform/test_excel_orderform_parser.py
@@ -1,8 +1,8 @@
from pathlib import Path
from cg.apps.orderform.excel_orderform_parser import ExcelOrderformParser
+from cg.models.orders.constants import OrderType
from cg.models.orders.excel_sample import ExcelSample
-from cg.models.orders.order import OrderType
from cg.models.orders.orderform_schema import Orderform
diff --git a/tests/cli/demultiplex/test_validate_sample_sheet.py b/tests/cli/demultiplex/test_validate_sample_sheet.py
index 7ccb32daa2..c2874d965e 100644
--- a/tests/cli/demultiplex/test_validate_sample_sheet.py
+++ b/tests/cli/demultiplex/test_validate_sample_sheet.py
@@ -19,7 +19,7 @@ def test_validate_non_existing_sample_sheet(
sample_sheet: Path = Path("a_sample_sheet_that_does_not_exist.csv")
assert sample_sheet.exists() is False
- # WHEN validating the sample sheet
+    # WHEN validating the sample sheet
result = cli_runner.invoke(
validate_sample_sheet,
[str(sample_sheet)],
@@ -28,8 +28,6 @@ def test_validate_non_existing_sample_sheet(
# THEN assert that it exits with a non-zero exit code
assert result.exit_code != EXIT_SUCCESS
- # THEN assert the correct information was communicated
- assert f"File '{sample_sheet.name}' does not exist" in result.output
def test_validate_sample_sheet_wrong_file_type(
diff --git a/tests/cli/store/test_store.py b/tests/cli/store/test_store.py
index 707232b559..1b4ca4c5c6 100644
--- a/tests/cli/store/test_store.py
+++ b/tests/cli/store/test_store.py
@@ -133,9 +133,10 @@ def test_store_flow_cell(
assert sample
# GIVEN samples objects on a flow cell
- with mocker.patch.object(
- Store, "get_samples_by_illumina_flow_cell", return_value=[sample]
- ), mocker.patch.object(CompressAPI, "add_decompressed_fastq", return_value=True):
+ with (
+ mocker.patch.object(Store, "get_samples_by_illumina_flow_cell", return_value=[sample]),
+ mocker.patch.object(CompressAPI, "add_decompressed_fastq", return_value=True),
+ ):
# WHEN running the store flow cell command
res = cli_runner.invoke(
store_illumina_run,
diff --git a/tests/cli/workflow/nf_analysis/test_cli_config_case.py b/tests/cli/workflow/nf_analysis/test_cli_config_case.py
index 87b9143c34..f3c5dcc834 100644
--- a/tests/cli/workflow/nf_analysis/test_cli_config_case.py
+++ b/tests/cli/workflow/nf_analysis/test_cli_config_case.py
@@ -24,7 +24,7 @@
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS,
+ NEXTFLOW_WORKFLOWS + [Workflow.NALLO],
)
def test_config_case_without_options(
cli_runner: CliRunner, workflow: Workflow, request: FixtureRequest
diff --git a/tests/cli/workflow/nf_analysis/test_cli_run.py b/tests/cli/workflow/nf_analysis/test_cli_run.py
index 08d28437be..c19e8c0ded 100644
--- a/tests/cli/workflow/nf_analysis/test_cli_run.py
+++ b/tests/cli/workflow/nf_analysis/test_cli_run.py
@@ -15,7 +15,7 @@
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS,
+ NEXTFLOW_WORKFLOWS + [Workflow.NALLO],
)
def test_run_without_options(cli_runner: CliRunner, workflow: Workflow, request: FixtureRequest):
"""Test run command for workflow without options."""
@@ -35,7 +35,7 @@ def test_run_without_options(cli_runner: CliRunner, workflow: Workflow, request:
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS,
+ NEXTFLOW_WORKFLOWS + [Workflow.NALLO],
)
def test_run_with_missing_case(
cli_runner: CliRunner,
@@ -64,7 +64,7 @@ def test_run_with_missing_case(
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS,
+ NEXTFLOW_WORKFLOWS + [Workflow.NALLO],
)
def test_run_case_without_samples(
cli_runner: CliRunner,
@@ -94,7 +94,7 @@ def test_run_case_without_samples(
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS,
+ NEXTFLOW_WORKFLOWS + [Workflow.NALLO],
)
def test_run_case_without_config_files(
cli_runner: CliRunner,
@@ -121,7 +121,7 @@ def test_run_case_without_config_files(
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS,
+ NEXTFLOW_WORKFLOWS + [Workflow.NALLO],
)
def test_run_case_from_start_dry_run(
cli_runner: CliRunner,
@@ -137,7 +137,7 @@ def test_run_case_from_start_dry_run(
case_id: str = request.getfixturevalue(f"{workflow}_case_id")
# GIVEN mocked config files
- request.getfixturevalue(f"{workflow}_mock_config")
+ request.getfixturevalue(f"{workflow}_config")
# WHEN invoking a command with dry-run specified
result = cli_runner.invoke(
@@ -155,7 +155,7 @@ def test_run_case_from_start_dry_run(
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS,
+ NEXTFLOW_WORKFLOWS + [Workflow.NALLO],
)
def test_run_case_with_revision_dry_run(
cli_runner: CliRunner,
@@ -171,7 +171,7 @@ def test_run_case_with_revision_dry_run(
case_id: str = request.getfixturevalue(f"{workflow}_case_id")
# GIVEN a mocked config
- request.getfixturevalue(f"{workflow}_mock_config")
+ request.getfixturevalue(f"{workflow}_config")
# WHEN invoking a command with dry-run and revision specified
result = cli_runner.invoke(
@@ -189,7 +189,7 @@ def test_run_case_with_revision_dry_run(
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS,
+ NEXTFLOW_WORKFLOWS + [Workflow.NALLO],
)
def test_resume_case_dry_run(
cli_runner: CliRunner,
@@ -206,7 +206,7 @@ def test_resume_case_dry_run(
case_id: str = request.getfixturevalue(f"{workflow}_case_id")
# GIVEN a mocked config
- # request.getfixturevalue(f"{workflow}_mock_config")
+ # request.getfixturevalue(f"{workflow}_config")
# WHEN invoking a command with dry-run and nf-tower-id specified
result = cli_runner.invoke(
@@ -225,13 +225,13 @@ def test_resume_case_dry_run(
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS,
+ NEXTFLOW_WORKFLOWS + [Workflow.NALLO],
)
def test_resume_case_with_missing_tower_id(
cli_runner: CliRunner,
workflow: Workflow,
caplog: LogCaptureFixture,
- raredisease_mock_config,
+ raredisease_config,
request: FixtureRequest,
):
"""Test resume command without providing NF-Tower ID and without existing Trailblazer config file."""
@@ -253,13 +253,13 @@ def test_resume_case_with_missing_tower_id(
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS,
+ NEXTFLOW_WORKFLOWS + [Workflow.NALLO],
)
def test_resume_using_nextflow_dry_run(
cli_runner: CliRunner,
workflow: Workflow,
caplog: LogCaptureFixture,
- raredisease_mock_config,
+ raredisease_config,
request: FixtureRequest,
):
"""Test command with case_id and config file using nextflow."""
diff --git a/tests/cli/workflow/nf_analysis/test_cli_start.py b/tests/cli/workflow/nf_analysis/test_cli_start.py
index 60749a351d..db0d27385c 100644
--- a/tests/cli/workflow/nf_analysis/test_cli_start.py
+++ b/tests/cli/workflow/nf_analysis/test_cli_start.py
@@ -16,7 +16,7 @@
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS,
+ NEXTFLOW_WORKFLOWS + [Workflow.NALLO],
)
def test_start(
cli_runner: CliRunner,
diff --git a/tests/cli/workflow/nf_analysis/test_cli_workflow_base.py b/tests/cli/workflow/nf_analysis/test_cli_workflow_base.py
index c8a127ece7..aa6cdcfc3e 100644
--- a/tests/cli/workflow/nf_analysis/test_cli_workflow_base.py
+++ b/tests/cli/workflow/nf_analysis/test_cli_workflow_base.py
@@ -10,7 +10,7 @@
@pytest.mark.parametrize(
"workflow",
- NEXTFLOW_WORKFLOWS + [Workflow.JASEN],
+    NEXTFLOW_WORKFLOWS + [Workflow.JASEN, Workflow.NALLO],
)
def test_workflow_no_args(cli_runner: CliRunner, workflow: Workflow, request):
"""Test to see that workflow is added and prints help when no subcommand is specified."""
diff --git a/tests/conftest.py b/tests/conftest.py
index 03206c4271..34bcdbe368 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -32,18 +32,19 @@
from cg.constants import FileExtensions, SequencingFileTag, Workflow
from cg.constants.constants import CaseActions, CustomerId, FileFormat, GenomeVersion, Strandedness
from cg.constants.gene_panel import GenePanelMasterList
-from cg.constants.housekeeper_tags import HK_DELIVERY_REPORT_TAG
+from cg.constants.housekeeper_tags import HK_DELIVERY_REPORT_TAG, AlignmentFileTag
from cg.constants.priority import SlurmQos
from cg.constants.scout import ScoutExportFileName
from cg.constants.sequencing import SequencingPlatform
from cg.constants.subject import Sex
-from cg.constants.tb import AnalysisTypes
+from cg.constants.tb import AnalysisType
from cg.io.controller import ReadFile, WriteFile
from cg.io.json import read_json, write_json
from cg.io.yaml import read_yaml, write_yaml
from cg.meta.tar.tar import TarAPI
from cg.meta.transfer.external_data import ExternalDataAPI
from cg.meta.workflow.jasen import JasenAnalysisAPI
+from cg.meta.workflow.nallo import NalloAnalysisAPI
from cg.meta.workflow.raredisease import RarediseaseAnalysisAPI
from cg.meta.workflow.rnafusion import RnafusionAnalysisAPI
from cg.meta.workflow.taxprofiler import TaxprofilerAnalysisAPI
@@ -51,6 +52,7 @@
from cg.models import CompressionData
from cg.models.cg_config import CGConfig, PDCArchivingDirectory
from cg.models.downsample.downsample_data import DownsampleData
+from cg.models.nallo.nallo import NalloSampleSheetHeaders
from cg.models.raredisease.raredisease import RarediseaseParameters, RarediseaseSampleSheetHeaders
from cg.models.rnafusion.rnafusion import RnafusionParameters, RnafusionSampleSheetEntry
from cg.models.run_devices.illumina_run_directory_data import IlluminaRunDirectoryData
@@ -59,6 +61,7 @@
from cg.services.deliver_files.rsync.service import DeliveryRsyncService
from cg.services.illumina.backup.encrypt_service import IlluminaRunEncryptionService
from cg.services.illumina.data_transfer.data_transfer_service import IlluminaDataTransferService
+from cg.services.orders.storing.constants import MAF_ORDER_ID
from cg.store.database import create_all_tables, drop_all_tables, initialize_database
from cg.store.models import (
Application,
@@ -118,9 +121,12 @@
"tests.fixture_plugins.observations_fixtures.observations_api_fixtures",
"tests.fixture_plugins.observations_fixtures.observations_input_files_fixtures",
"tests.fixture_plugins.orders_fixtures.order_form_fixtures",
- "tests.fixture_plugins.orders_fixtures.order_store_service_fixtures",
"tests.fixture_plugins.orders_fixtures.order_to_submit_fixtures",
- "tests.fixture_plugins.orders_fixtures.status_data_fixtures",
+ "tests.fixture_plugins.orders_fixtures.order_fixtures",
+ "tests.fixture_plugins.orders_fixtures.path_fixtures",
+ "tests.fixture_plugins.orders_fixtures.services_fixtures",
+ "tests.fixture_plugins.orders_fixtures.store_fixtures",
+ "tests.fixture_plugins.orders_fixtures.store_service_fixtures",
"tests.fixture_plugins.pacbio_fixtures.context_fixtures",
"tests.fixture_plugins.pacbio_fixtures.dto_fixtures",
"tests.fixture_plugins.pacbio_fixtures.file_data_fixtures",
@@ -656,9 +662,14 @@ def madeline_api(madeline_output: Path) -> MockMadelineAPI:
@pytest.fixture(scope="session")
-def ticket_id() -> str:
+def ticket_id_as_int() -> int:
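+    """Return a ticket number as an integer for testing."""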
+ return 123456
+
+
+@pytest.fixture(scope="session")
+def ticket_id(ticket_id_as_int: int) -> str:
"""Return a ticket number for testing."""
- return "123456"
+ return str(ticket_id_as_int)
@pytest.fixture
@@ -735,12 +746,6 @@ def apps_dir(fixtures_dir: Path) -> Path:
return Path(fixtures_dir, "apps")
-@pytest.fixture(scope="session")
-def cgweb_orders_dir(fixtures_dir: Path) -> Path:
- """Return the path to the cgweb_orders dir."""
- return Path(fixtures_dir, "cgweb_orders")
-
-
@pytest.fixture(scope="session")
def data_dir(fixtures_dir: Path) -> Path:
"""Return the path to the data dir."""
@@ -1220,7 +1225,13 @@ def hermes_process() -> ProcessMock:
@pytest.fixture(name="hermes_api")
def hermes_api(hermes_process: ProcessMock) -> HermesApi:
"""Return a Hermes API with a mocked process."""
- hermes_config = {"hermes": {"binary_path": "/bin/true"}}
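+    # The Hermes config now also carries container settings; the values below are test placeholders.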
+ hermes_config = {
+ "hermes": {
+ "binary_path": "/bin/true",
+ "container_mount_volume": "a_str",
+ "container_path": "/singularity_cache",
+ }
+ }
hermes_api = HermesApi(config=hermes_config)
hermes_api.process = hermes_process
return hermes_api
@@ -1427,6 +1438,12 @@ def metagenomics_application_tag() -> str:
return "METPCFR030"
+@pytest.fixture
+def wgs_long_read_application_tag() -> str:
+    """Return the WGS long-read application tag."""
+ return "LWPBELB070"
+
+
@pytest.fixture
def store() -> Generator[Store, None, None]:
"""Return a CG store."""
@@ -1597,7 +1614,7 @@ def base_store(
),
store.add_application(
tag=apptag_rna,
- prep_category="tgs",
+ prep_category="wts",
description="RNA seq, poly-A based priming",
percent_kth=80,
percent_reads_guaranteed=75,
@@ -1642,6 +1659,9 @@ def base_store(
organism = store.add_organism("C. jejuni", "C. jejuni")
store.session.add(organism)
+
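+    # Seed the store with the MAF order (MAF_ORDER_ID is imported from cg.services.orders.storing.constants above)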
+ order: Order = Order(customer_id=1, id=MAF_ORDER_ID, ticket_id="100000000")
+ store.add_multiple_items_to_store([order])
store.session.commit()
yield store
@@ -1889,6 +1909,7 @@ def context_config(
conda_binary: Path,
balsamic_dir: Path,
microsalt_dir: Path,
+ nallo_dir: Path,
raredisease_dir: Path,
rnafusion_dir: Path,
taxprofiler_dir: Path,
@@ -2033,7 +2054,7 @@ def context_config(
"upload_password": "pass",
"submitter": "s.submitter",
},
- "hermes": {"binary_path": "hermes"},
+ "hermes": {"binary_path": "hermes", "container_path": "/singularity_cache"},
"housekeeper": {"database": hk_uri, "root": str(housekeeper_dir)},
"lims": {
"host": "https://lims.scilifelab.se",
@@ -2081,6 +2102,27 @@ def context_config(
"conda_env": "S_mutant",
"root": str(mip_dir),
},
+ "nallo": {
+ "binary_path": nextflow_binary.as_posix(),
+ "compute_env": "nf_tower_compute_env",
+ "conda_binary": conda_binary.as_posix(),
+ "conda_env": "S_nallo",
+ "platform": str(nf_analysis_platform_config_path),
+ "params": str(nf_analysis_pipeline_params_path),
+ "config": str(nf_analysis_pipeline_config_path),
+ "resources": str(nf_analysis_pipeline_resource_optimisation_path),
+ "launch_directory": Path("path", "to", "launchdir").as_posix(),
+ "workflow_bin_path": Path("workflow", "path").as_posix(),
+ "profile": "myprofile",
+ "references": Path("path", "to", "references").as_posix(),
+ "revision": "dev",
+ "root": str(nallo_dir),
+ "slurm": {
+ "account": "development",
+ "mail_user": email_address,
+ },
+ "tower_workflow": "nallo",
+ },
"raredisease": {
"binary_path": nextflow_binary.as_posix(),
"compute_env": "nf_tower_compute_env",
@@ -2360,6 +2402,16 @@ def store_with_users(store: Store, helpers: StoreHelpers) -> Generator[Store, No
yield store
+@pytest.fixture
+def customer_without_users(store_with_users: Store):
+ return store_with_users.add_customer(
+ internal_id="internal_id",
+ name="some_name",
+ invoice_address="some_address",
+ invoice_reference="some_reference",
+ )
+
+
@pytest.fixture
def store_with_cases_and_customers(
store: Store, helpers: StoreHelpers
@@ -2466,12 +2518,160 @@ def mock_fastq_files(fastq_forward_read_path: Path, fastq_reverse_read_path: Pat
return [fastq_forward_read_path, fastq_reverse_read_path]
+@pytest.fixture(scope="session")
+def bam_unmapped_read_paths(housekeeper_dir: Path) -> Path:
+ """Path to existing bam read file."""
+ bam_unmapped_read_path = Path(
+ housekeeper_dir, "m00000_000000_000000_s4.hifi_reads.bc2021"
+ ).with_suffix(f"{AlignmentFileTag.BAM}")
+ with open(bam_unmapped_read_path, "wb") as wh:
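+        # NOTE: writes the hex string as ASCII text, i.e. placeholder content standing in for real BAM bytes.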
+ wh.write(
+ b"1f 8b 08 04 00 00 00 00 00 ff 06 00 42 43 02 00 1b 00 03 00 00 00 00 00 00 00 00 00"
+ )
+ return bam_unmapped_read_path
+
+
@pytest.fixture(scope="session")
def sequencing_platform() -> str:
"""Return a default sequencing platform."""
return SequencingPlatform.ILLUMINA
+# Nallo fixtures
+@pytest.fixture(scope="session")
+def nallo_case_id() -> str:
+ """Returns a nallo case id."""
+ return "nallo_case_two_samples"
+
+
+@pytest.fixture(scope="function")
+def nallo_context(
+ cg_context: CGConfig,
+ helpers: StoreHelpers,
+ nf_analysis_housekeeper: HousekeeperAPI,
+ trailblazer_api: MockTB,
+ hermes_api: HermesApi,
+ cg_dir: Path,
+ nallo_case_id: str,
+ sample_id: str,
+ sample_name: str,
+ another_sample_name: str,
+ father_sample_id: str,
+ no_sample_case_id: str,
+ wgs_long_read_application_tag: str,
+) -> CGConfig:
+ """Context to use in CLI."""
+ cg_context.housekeeper_api_ = nf_analysis_housekeeper
+ cg_context.trailblazer_api_ = trailblazer_api
+ cg_context.meta_apis["analysis_api"] = NalloAnalysisAPI(config=cg_context)
+ status_db: Store = cg_context.status_db
+
+ # Create ERROR case with NO SAMPLES
+ helpers.add_case(status_db, internal_id=no_sample_case_id, name=no_sample_case_id)
+
+ # Create textbook case with two samples
+ nallo_case_two_samples: Case = helpers.add_case(
+ store=status_db,
+ internal_id=nallo_case_id,
+ name=nallo_case_id,
+ data_analysis=Workflow.NALLO,
+ )
+
+ nallo_sample_one: Sample = helpers.add_sample(
+ status_db,
+ internal_id=sample_id,
+ name=sample_name,
+ last_sequenced_at=datetime.now(),
+ application_tag=wgs_long_read_application_tag,
+ reference_genome=GenomeVersion.HG38,
+ )
+
+ another_nallo_sample: Sample = helpers.add_sample(
+ status_db,
+ internal_id=father_sample_id,
+ name=another_sample_name,
+ last_sequenced_at=datetime.now(),
+ application_tag=wgs_long_read_application_tag,
+ reference_genome=GenomeVersion.HG38,
+ )
+
+ helpers.add_relationship(
+ status_db,
+ case=nallo_case_two_samples,
+ sample=nallo_sample_one,
+ )
+
+ helpers.add_relationship(
+ status_db,
+ case=nallo_case_two_samples,
+ sample=another_nallo_sample,
+ )
+ return cg_context
+
+
+@pytest.fixture(scope="function")
+def nallo_dir(tmpdir_factory, apps_dir: Path) -> str:
+ """Return the path to the nallo apps dir."""
+ nallo_dir = tmpdir_factory.mktemp("nallo")
+ return Path(nallo_dir).absolute().as_posix()
+
+
+@pytest.fixture(scope="function")
+def nallo_config(nallo_dir: Path, nallo_case_id: str) -> None:
+ """Create Nallo samplesheet.csv file for testing"""
+ Path.mkdir(Path(nallo_dir, nallo_case_id), parents=True, exist_ok=True)
+ Path(nallo_dir, nallo_case_id, f"{nallo_case_id}_samplesheet").with_suffix(
+ FileExtensions.CSV
+ ).touch(exist_ok=True)
+
+
+@pytest.fixture(scope="function")
+def nallo_nextflow_config_file_path(nallo_dir, nallo_case_id) -> Path:
+ """Path to config file."""
+ return Path(nallo_dir, nallo_case_id, f"{nallo_case_id}_nextflow_config").with_suffix(
+ FileExtensions.JSON
+ )
+
+
+@pytest.fixture(scope="function")
+def nallo_params_file_path(nallo_dir, nallo_case_id) -> Path:
+ """Path to parameters file."""
+ return Path(nallo_dir, nallo_case_id, f"{nallo_case_id}_params_file").with_suffix(
+ FileExtensions.YAML
+ )
+
+
+@pytest.fixture(scope="function")
+def nallo_sample_sheet_content(
+ sample_id: str,
+ nallo_case_id: str,
+ bam_unmapped_read_paths: Path,
+) -> str:
+ """Return the expected sample sheet content for Nallo."""
+ headers: str = ",".join(NalloSampleSheetHeaders.list())
+ row: str = ",".join(
+ [
+ nallo_case_id,
+ sample_id,
+ bam_unmapped_read_paths.as_posix(),
+ nallo_case_id,
+ "0",
+ "0",
+ "2",
+ "2",
+ ]
+ )
+ return "\n".join([headers, row])
+
+
+@pytest.fixture(scope="function")
+def nallo_sample_sheet_path(nallo_dir, nallo_case_id) -> Path:
+ """Path to sample sheet."""
+ return Path(nallo_dir, nallo_case_id, f"{nallo_case_id}_samplesheet").with_suffix(
+ FileExtensions.CSV
+ )
+
+
# Raredisease fixtures
@pytest.fixture(scope="function")
def raredisease_dir(tmpdir_factory, apps_dir: Path) -> str:
@@ -2581,7 +2781,7 @@ def raredisease_parameters_default(
outdir=Path(raredisease_dir, raredisease_case_id),
target_bed_file=bed_version_file_name,
skip_germlinecnvcaller=False,
- analysis_type=AnalysisTypes.WES,
+ analysis_type=AnalysisType.WES,
save_mapped_as_cram=True,
vcfanno_extra_resources=str(
Path(raredisease_dir, raredisease_case_id + ScoutExportFileName.MANAGED_VARIANTS)
@@ -2734,7 +2934,7 @@ def mock_deliverable(
@pytest.fixture(scope="function")
-def raredisease_mock_config(raredisease_dir: Path, raredisease_case_id: str) -> None:
+def raredisease_config(raredisease_dir: Path, raredisease_case_id: str) -> None:
"""Create samplesheet.csv file for testing"""
Path.mkdir(Path(raredisease_dir, raredisease_case_id), parents=True, exist_ok=True)
Path(raredisease_dir, raredisease_case_id, f"{raredisease_case_id}_samplesheet").with_suffix(
@@ -3256,7 +3456,7 @@ def rnafusion_mock_analysis_finish(
@pytest.fixture(scope="function")
-def rnafusion_mock_config(rnafusion_dir: Path, rnafusion_case_id: str) -> None:
+def rnafusion_config(rnafusion_dir: Path, rnafusion_case_id: str) -> None:
"""Create samplesheet.csv file for testing"""
Path.mkdir(Path(rnafusion_dir, rnafusion_case_id), parents=True, exist_ok=True)
Path(rnafusion_dir, rnafusion_case_id, f"{rnafusion_case_id}_samplesheet.csv").with_suffix(
@@ -3309,7 +3509,7 @@ def tomte_gene_panel_path(tomte_dir, tomte_case_id) -> Path:
@pytest.fixture(scope="function")
-def tomte_mock_config(tomte_dir: Path, tomte_case_id: str) -> None:
+def tomte_config(tomte_dir: Path, tomte_case_id: str) -> None:
"""Create Tomte samplesheet.csv file for testing."""
Path.mkdir(Path(tomte_dir, tomte_case_id), parents=True, exist_ok=True)
Path(tomte_dir, tomte_case_id, f"{tomte_case_id}_samplesheet").with_suffix(
@@ -3914,15 +4114,6 @@ def taxprofiler_mock_analysis_finish(
)
-@pytest.fixture(scope="function")
-def taxprofiler_mock_config(taxprofiler_dir: Path, taxprofiler_case_id: str) -> None:
- """Create CSV sample sheet file for testing."""
- Path.mkdir(Path(taxprofiler_dir, taxprofiler_case_id), parents=True, exist_ok=True)
- Path(taxprofiler_dir, taxprofiler_case_id, f"{taxprofiler_case_id}_samplesheet").with_suffix(
- FileExtensions.CSV
- ).touch(exist_ok=True)
-
-
@pytest.fixture(scope="function")
def taxprofiler_deliverables_response_data(
create_multiqc_html_file,
@@ -4049,7 +4240,6 @@ def store_with_case_and_sample_with_reads(
customer_id=case.customer_id,
ticket_id=case.latest_ticket,
order_date=case.ordered_at,
- workflow=case.data_analysis,
)
case.orders.append(order)
for sample_internal_id in [downsample_sample_internal_id_1, downsample_sample_internal_id_2]:
diff --git a/tests/fixture_plugins/delivery_fixtures/bundle_fixtures.py b/tests/fixture_plugins/delivery_fixtures/bundle_fixtures.py
index 4196daa0ba..489a3f5a99 100644
--- a/tests/fixture_plugins/delivery_fixtures/bundle_fixtures.py
+++ b/tests/fixture_plugins/delivery_fixtures/bundle_fixtures.py
@@ -100,3 +100,59 @@ def hk_delivery_case_bundle(
},
]
return case_hk_bundle
+
+
+@pytest.fixture
+def hk_delivery_case_bundle_fohm_upload(
+ case_hk_bundle_no_files: dict[str, Any],
+ case_id: str,
+ sample_id: str,
+ another_sample_id: str,
+ delivery_report_file: Path,
+ delivery_case_fastq_file: Path,
+ delivery_another_case_fastq_file: Path,
+ delivery_consensus_sample_file: Path,
+ delivery_another_consensus_sample_file: Path,
+ delivery_vcf_report_file: Path,
+ delivery_another_vcf_report_file: Path,
+) -> dict:
+ case_hk_bundle: dict[str, Any] = deepcopy(case_hk_bundle_no_files)
+ case_hk_bundle["name"] = case_id
+ case_hk_bundle["files"] = [
+ {
+ "archive": False,
+ "path": delivery_report_file.as_posix(),
+ "tags": [HK_DELIVERY_REPORT_TAG, case_id],
+ },
+ {
+ "archive": False,
+ "path": delivery_case_fastq_file.as_posix(),
+ "tags": ["fastq", sample_id],
+ },
+ {
+ "archive": False,
+ "path": delivery_another_case_fastq_file.as_posix(),
+ "tags": ["fastq", another_sample_id],
+ },
+ {
+ "archive": False,
+ "path": delivery_consensus_sample_file.as_posix(),
+ "tags": ["consensus-sample", sample_id],
+ },
+ {
+ "archive": False,
+ "path": delivery_another_consensus_sample_file.as_posix(),
+ "tags": ["consensus-sample", another_sample_id],
+ },
+ {
+ "archive": False,
+ "path": delivery_vcf_report_file.as_posix(),
+ "tags": ["vcf-report", sample_id],
+ },
+ {
+ "archive": False,
+ "path": delivery_another_vcf_report_file.as_posix(),
+ "tags": ["vcf-report", another_sample_id],
+ },
+ ]
+ return case_hk_bundle
diff --git a/tests/fixture_plugins/delivery_fixtures/context_fixtures.py b/tests/fixture_plugins/delivery_fixtures/context_fixtures.py
index 3c217896c0..95a8e576be 100644
--- a/tests/fixture_plugins/delivery_fixtures/context_fixtures.py
+++ b/tests/fixture_plugins/delivery_fixtures/context_fixtures.py
@@ -7,7 +7,7 @@
from cg.apps.housekeeper.hk import HousekeeperAPI
from cg.constants import DataDelivery, Workflow
from cg.models.cg_config import CGConfig
-from cg.store.models import Case, Sample
+from cg.store.models import Case, Sample, Order
from cg.store.store import Store
from tests.store_helpers import StoreHelpers
@@ -21,16 +21,33 @@ def delivery_housekeeper_api(
hk_delivery_case_bundle: dict[str, Any],
) -> HousekeeperAPI:
"""Delivery API Housekeeper context."""
+ hk_api: HousekeeperAPI = real_housekeeper_api
+ helpers.ensure_hk_bundle(store=hk_api, bundle_data=hk_delivery_sample_bundle, include=True)
helpers.ensure_hk_bundle(
- store=real_housekeeper_api, bundle_data=hk_delivery_sample_bundle, include=True
+ store=hk_api, bundle_data=hk_delivery_another_sample_bundle, include=True
)
+ helpers.ensure_hk_bundle(store=hk_api, bundle_data=hk_delivery_case_bundle, include=True)
+ return hk_api
+
+
+@pytest.fixture
+def delivery_fohm_upload_housekeeper_api(
+ real_housekeeper_api: HousekeeperAPI,
+ helpers: StoreHelpers,
+ hk_delivery_case_bundle_fohm_upload: dict[str, Any],
+ hk_delivery_sample_bundle: dict[str, Any],
+ hk_delivery_another_sample_bundle: dict[str, Any],
+) -> HousekeeperAPI:
+ """Delivery API Housekeeper context."""
+ hk_api: HousekeeperAPI = real_housekeeper_api
+ helpers.ensure_hk_bundle(store=hk_api, bundle_data=hk_delivery_sample_bundle, include=True)
helpers.ensure_hk_bundle(
- store=real_housekeeper_api, bundle_data=hk_delivery_another_sample_bundle, include=True
+ store=hk_api, bundle_data=hk_delivery_another_sample_bundle, include=True
)
helpers.ensure_hk_bundle(
- store=real_housekeeper_api, bundle_data=hk_delivery_case_bundle, include=True
+ store=hk_api, bundle_data=hk_delivery_case_bundle_fohm_upload, include=True
)
- return real_housekeeper_api
+ return hk_api
@pytest.fixture
@@ -112,7 +129,8 @@ def delivery_store_microsalt(
data_analysis=Workflow.MICROSALT,
data_delivery=DataDelivery.FASTQ_QC,
)
-
+ order: Order = helpers.add_order(store=status_db, customer_id=case.customer.id, ticket_id=1)
+ case.orders.append(order)
# MicroSALT samples
sample: Sample = helpers.add_sample(
store=status_db,
@@ -143,6 +161,68 @@ def delivery_store_microsalt(
return status_db
+@pytest.fixture
+def delivery_store_mutant(
+ cg_context: CGConfig,
+ helpers: StoreHelpers,
+ case_id: str,
+ no_sample_case_id: str,
+ case_name: str,
+ sample_id: str,
+ another_sample_id: str,
+ sample_id_not_enough_reads: str,
+ total_sequenced_reads_pass: int,
+ total_sequenced_reads_not_pass: int,
+ sample_name: str,
+ another_sample_name: str,
+ microbial_application_tag: str,
+) -> Store:
+ """Delivery API StatusDB context for Mutant."""
+ status_db: Store = cg_context.status_db
+
+ # Error case without samples
+ helpers.add_case(store=status_db, internal_id=no_sample_case_id, name=no_sample_case_id)
+
+ # Mutant case with fastq-analysis as data delivery
+ case: Case = helpers.add_case(
+ store=status_db,
+ internal_id=case_id,
+ name=case_name,
+ data_analysis=Workflow.MUTANT,
+ data_delivery=DataDelivery.FASTQ_ANALYSIS,
+ )
+ order: Order = helpers.add_order(store=status_db, customer_id=case.customer.id, ticket_id=1)
+ case.orders.append(order)
+ # Mutant samples
+ sample: Sample = helpers.add_sample(
+ store=status_db,
+ application_tag=microbial_application_tag,
+ internal_id=sample_id,
+ name=sample_name,
+ reads=total_sequenced_reads_pass,
+ )
+
+ another_sample: Sample = helpers.add_sample(
+ store=status_db,
+ application_tag=microbial_application_tag,
+ internal_id=another_sample_id,
+ name=another_sample_name,
+ reads=total_sequenced_reads_pass,
+ )
+
+ sample_not_enough_reads: Sample = helpers.add_sample(
+ store=status_db,
+ application_tag=microbial_application_tag,
+ internal_id=sample_id_not_enough_reads,
+ reads=total_sequenced_reads_not_pass,
+ )
+
+ for sample_mutant in [sample, another_sample, sample_not_enough_reads]:
+ helpers.add_relationship(store=status_db, case=case, sample=sample_mutant)
+
+ return status_db
+
+
@pytest.fixture
def delivery_context_balsamic(
cg_context: CGConfig,
diff --git a/tests/fixture_plugins/delivery_fixtures/delivery_files_models_fixtures.py b/tests/fixture_plugins/delivery_fixtures/delivery_files_models_fixtures.py
index a252c4791c..2d69ac742c 100644
--- a/tests/fixture_plugins/delivery_fixtures/delivery_files_models_fixtures.py
+++ b/tests/fixture_plugins/delivery_fixtures/delivery_files_models_fixtures.py
@@ -1,3 +1,4 @@
+import os
from pathlib import Path
import pytest
@@ -15,6 +16,7 @@
DeliveryMetaData,
SampleFile,
)
+from cg.services.deliver_files.file_formatter.destination.models import FormattedFile
from cg.store.models import Case
from cg.store.store import Store
@@ -89,6 +91,78 @@ def expected_bam_delivery_files(
return DeliveryFiles(delivery_data=delivery_meta_data, case_files=[], sample_files=sample_files)
+@pytest.fixture()
+def expected_bam_delivery_files_single_sample(
+ expected_bam_delivery_files: DeliveryFiles, sample_id: str
+) -> DeliveryFiles:
+ expected_bam_delivery_files.sample_files = [
+ sample_file
+ for sample_file in expected_bam_delivery_files.sample_files
+ if sample_file.sample_id == sample_id
+ ]
+ return expected_bam_delivery_files
+
+
+@pytest.fixture
+def expected_fohm_delivery_files(
+ delivery_fohm_upload_housekeeper_api: HousekeeperAPI,
+ case_id: str,
+ case_name: str,
+ sample_id: str,
+ sample_name: str,
+ another_sample_id: str,
+ another_sample_name: str,
+ delivery_store_mutant: Store,
+) -> DeliveryFiles:
+ """Return the expected fastq delivery files."""
+ sample_info: list[tuple[str, str]] = [
+ (sample_id, sample_name),
+ (another_sample_id, another_sample_name),
+ ]
+ sample_files: list[SampleFile] = [
+ SampleFile(
+ case_id=case_id,
+ sample_id=sample[0],
+ sample_name=sample[1],
+ file_path=delivery_fohm_upload_housekeeper_api.get_files_from_latest_version(
+ bundle_name=sample[0], tags=[SequencingFileTag.FASTQ]
+ )[0].full_path,
+ )
+ for sample in sample_info
+ ]
+ case_sample_info: list[tuple[str, str, str]] = [
+ (sample_id, sample_name, "consensus-sample"),
+ (sample_id, sample_name, "vcf-report"),
+ (another_sample_id, another_sample_name, "consensus-sample"),
+ (another_sample_id, another_sample_name, "vcf-report"),
+ ]
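+    # Each (sample id, tag) pair is matched as a tag set against files on the case bundle.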
+ case_sample_files: list[SampleFile] = [
+ SampleFile(
+ case_id=case_id,
+ sample_id=sample[0],
+ sample_name=sample[1],
+ file_path=delivery_fohm_upload_housekeeper_api.get_files_from_latest_version_containing_tags(
+ bundle_name=case_id, tags=[{sample[2], sample[0]}]
+ )[
+ 0
+ ].full_path,
+ )
+ for sample in case_sample_info
+ ]
+
+ case: Case = delivery_store_mutant.get_case_by_internal_id(case_id)
+ delivery_meta_data = DeliveryMetaData(
+ case_id=case.internal_id,
+ customer_internal_id=case.customer.internal_id,
+ ticket_id=case.latest_ticket,
+ )
+ return DeliveryFiles(
+ delivery_data=delivery_meta_data,
+ case_files=[],
+ sample_files=case_sample_files + sample_files,
+ )
+
+
@pytest.fixture
def expected_analysis_delivery_files(
delivery_housekeeper_api: HousekeeperAPI,
@@ -152,7 +226,7 @@ def expected_moved_fastq_delivery_files(
INBOX_NAME,
delivery_files.delivery_data.ticket_id,
)
- delivery_files.delivery_data.customer_ticket_inbox = inbox_dir_path
+ delivery_files.delivery_data.delivery_path = inbox_dir_path
new_sample_files: list[SampleFile] = swap_file_paths_with_inbox_paths(
file_models=delivery_files.sample_files, inbox_dir_path=inbox_dir_path
)
@@ -175,7 +249,7 @@ def expected_moved_analysis_delivery_files(
INBOX_NAME,
delivery_files.delivery_data.ticket_id,
)
- delivery_files.delivery_data.customer_ticket_inbox = inbox_dir_path
+ delivery_files.delivery_data.delivery_path = inbox_dir_path
new_case_files: list[CaseFile] = swap_file_paths_with_inbox_paths(
file_models=delivery_files.case_files, inbox_dir_path=inbox_dir_path
)
@@ -214,23 +288,65 @@ def expected_moved_analysis_case_delivery_files(
@pytest.fixture
-def fastq_concatenation_sample_files(tmp_path: Path) -> list[SampleFile]:
- some_ticket: str = "some_ticket"
- fastq_paths: list[Path] = [
- Path(tmp_path, some_ticket, "S1_1_R1_1.fastq.gz"),
- Path(tmp_path, some_ticket, "S1_2_R1_1.fastq.gz"),
- Path(tmp_path, some_ticket, "S1_1_R2_1.fastq.gz"),
- Path(tmp_path, some_ticket, "S1_2_R2_1.fastq.gz"),
- ]
- return [
- SampleFile(
- sample_id="S1",
- case_id="Case1",
- sample_name="Sample1",
- file_path=fastq_path,
+def fastq_concatenation_sample_files(
+ tmp_path: Path, expected_fastq_delivery_files: DeliveryFiles
+) -> list[SampleFile]:
+ """
+ Return a list of sample files that are to be concatenated.
+ """
+ inbox = Path(
+ expected_fastq_delivery_files.delivery_data.customer_internal_id,
+ INBOX_NAME,
+ expected_fastq_delivery_files.delivery_data.ticket_id,
+ )
+ sample_data = [("Sample_ID1", "Sample_Name1"), ("Sample_ID2", "Sample_Name2")]
+ sample_files = []
+ for sample_id, sample_name in sample_data:
+ fastq_paths: list[Path] = [
+ Path(tmp_path, inbox, f"FC_{sample_id}_L001_R1_001.fastq.gz"),
+ Path(tmp_path, inbox, f"FC_{sample_id}_L002_R1_001.fastq.gz"),
+ Path(tmp_path, inbox, f"FC_{sample_id}_L001_R2_001.fastq.gz"),
+ Path(tmp_path, inbox, f"FC_{sample_id}_L002_R2_001.fastq.gz"),
+ ]
+
+ sample_files.extend(
+ [
+ SampleFile(
+ sample_id=sample_id,
+ case_id="Case1",
+ sample_name=sample_name,
+ file_path=fastq_path,
+ )
+ for fastq_path in fastq_paths
+ ]
)
- for fastq_path in fastq_paths
- ]
+ return sample_files
+
+
+@pytest.fixture
+def fastq_concatenation_sample_files_flat(tmp_path: Path) -> list[SampleFile]:
+ sample_data = [("Sample_ID2", "Sample_Name2"), ("Sample_ID1", "Sample_Name1")]
+ sample_files = []
+ for sample_id, sample_name in sample_data:
+ fastq_paths: list[Path] = [
+ Path(tmp_path, f"FC_{sample_id}_L001_R1_001.fastq.gz"),
+ Path(tmp_path, f"FC_{sample_id}_L002_R1_001.fastq.gz"),
+ Path(tmp_path, f"FC_{sample_id}_L001_R2_001.fastq.gz"),
+ Path(tmp_path, f"FC_{sample_id}_L002_R2_001.fastq.gz"),
+ ]
+
+ sample_files.extend(
+ [
+ SampleFile(
+ sample_id=sample_id,
+ case_id="Case1",
+ sample_name=sample_name,
+ file_path=fastq_path,
+ )
+ for fastq_path in fastq_paths
+ ]
+ )
+ return sample_files
def swap_file_paths_with_inbox_paths(
@@ -243,3 +359,57 @@ def swap_file_paths_with_inbox_paths(
new_file_model.file_path = Path(inbox_dir_path, file_model.file_path.name)
new_file_models.append(new_file_model)
return new_file_models
+
+
+@pytest.fixture
+def lims_naming_metadata() -> str:
+    """Return a mock LIMS naming metadata prefix."""
+    return "01_SE100_"
+
+
+@pytest.fixture
+def expected_mutant_formatted_files(
+ expected_concatenated_fastq_formatted_files, lims_naming_metadata
+) -> list[FormattedFile]:
+ unique_combinations = []
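+    # Prefix each formatted name with the LIMS naming metadata and deduplicate while preserving order.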
+ for formatted_file in expected_concatenated_fastq_formatted_files:
+ formatted_file.original_path = formatted_file.formatted_path
+ formatted_file.formatted_path = Path(
+ formatted_file.formatted_path.parent,
+ f"{lims_naming_metadata}{formatted_file.formatted_path.name}",
+ )
+ if formatted_file not in unique_combinations:
+ unique_combinations.append(formatted_file)
+ return unique_combinations
+
+
+@pytest.fixture
+def mutant_moved_files(fastq_concatenation_sample_files) -> list[SampleFile]:
+ return fastq_concatenation_sample_files
+
+
+@pytest.fixture
+def expected_upload_files(expected_analysis_delivery_files: DeliveryFiles) -> DeliveryFiles:
+ return expected_analysis_delivery_files
+
+
+@pytest.fixture
+def expected_moved_upload_files(
+    expected_analysis_delivery_files: DeliveryFiles, tmp_path: Path
+) -> DeliveryFiles:
+ delivery_files = DeliveryFiles(**expected_analysis_delivery_files.model_dump())
+ delivery_files.delivery_data.delivery_path = tmp_path
+ new_case_files: list[CaseFile] = swap_file_paths_with_inbox_paths(
+ file_models=delivery_files.case_files, inbox_dir_path=tmp_path
+ )
+ new_sample_files: list[SampleFile] = swap_file_paths_with_inbox_paths(
+ file_models=delivery_files.sample_files, inbox_dir_path=tmp_path
+ )
+
+ return DeliveryFiles(
+ delivery_data=delivery_files.delivery_data,
+ case_files=new_case_files,
+ sample_files=new_sample_files,
+ )
+
+
+@pytest.fixture
+def empty_sample() -> None:
+    """Return None to represent a missing sample."""
+    return None
diff --git a/tests/fixture_plugins/delivery_fixtures/delivery_formatted_files_fixtures.py b/tests/fixture_plugins/delivery_fixtures/delivery_formatted_files_fixtures.py
index 89b614b584..872d39dd36 100644
--- a/tests/fixture_plugins/delivery_fixtures/delivery_formatted_files_fixtures.py
+++ b/tests/fixture_plugins/delivery_fixtures/delivery_formatted_files_fixtures.py
@@ -1,9 +1,13 @@
from pathlib import Path
+from unittest.mock import Mock
import pytest
-from cg.services.deliver_files.file_fetcher.models import DeliveryFiles, SampleFile
-from cg.services.deliver_files.file_formatter.models import FormattedFile
+from cg.services.deliver_files.file_fetcher.models import DeliveryFiles
+from cg.services.deliver_files.file_formatter.destination.models import FormattedFile
+from cg.services.deliver_files.file_formatter.files.concatenation_service import (
+ SampleFileConcatenationFormatter,
+)
@pytest.fixture
@@ -42,6 +46,22 @@ def expected_formatted_analysis_sample_files(
return formatted_files
+@pytest.fixture
+def expected_flat_formatted_analysis_sample_files(
+ expected_moved_analysis_delivery_files: DeliveryFiles,
+) -> list[FormattedFile]:
+ formatted_files: list[FormattedFile] = []
+ for sample_file in expected_moved_analysis_delivery_files.sample_files:
+ replaced_sample_file_name: str = sample_file.file_path.name.replace(
+ sample_file.sample_id, sample_file.sample_name
+ )
+ formatted_file_path = Path(sample_file.file_path.parent, replaced_sample_file_name)
+ formatted_files.append(
+ FormattedFile(original_path=sample_file.file_path, formatted_path=formatted_file_path)
+ )
+ return formatted_files
+
+
@pytest.fixture
def expected_formatted_fastq_sample_files(
expected_moved_fastq_delivery_files: DeliveryFiles,
@@ -69,10 +89,11 @@ def expected_concatenated_fastq_formatted_files(
replaced_sample_file_name: str = sample_file.file_path.name.replace(
sample_file.sample_id, sample_file.sample_name
)
- replaced_sample_file_name = replaced_sample_file_name.replace("1_R1_1", "1")
- replaced_sample_file_name = replaced_sample_file_name.replace("2_R1_1", "1")
- replaced_sample_file_name = replaced_sample_file_name.replace("1_R2_1", "2")
- replaced_sample_file_name = replaced_sample_file_name.replace("2_R2_1", "2")
+ replaced_sample_file_name = replaced_sample_file_name.replace("L001_R1_001", "1")
+ replaced_sample_file_name = replaced_sample_file_name.replace("L002_R1_001", "1")
+ replaced_sample_file_name = replaced_sample_file_name.replace("L001_R2_001", "2")
+ replaced_sample_file_name = replaced_sample_file_name.replace("L002_R2_001", "2")
+ replaced_sample_file_name = replaced_sample_file_name.replace("FC_", "")
formatted_file_path = Path(
sample_file.file_path.parent, sample_file.sample_name, replaced_sample_file_name
)
@@ -82,6 +103,27 @@ def expected_concatenated_fastq_formatted_files(
return formatted_files
+@pytest.fixture
+def expected_concatenated_fastq_flat_formatted_files(
+ fastq_concatenation_sample_files_flat,
+) -> list[FormattedFile]:
+ formatted_files: list[FormattedFile] = []
+ for sample_file in fastq_concatenation_sample_files_flat:
+ replaced_sample_file_name: str = sample_file.file_path.name.replace(
+ sample_file.sample_id, sample_file.sample_name
+ )
+ replaced_sample_file_name = replaced_sample_file_name.replace("L001_R1_001", "1")
+ replaced_sample_file_name = replaced_sample_file_name.replace("L002_R1_001", "1")
+ replaced_sample_file_name = replaced_sample_file_name.replace("L001_R2_001", "2")
+ replaced_sample_file_name = replaced_sample_file_name.replace("L002_R2_001", "2")
+ replaced_sample_file_name = replaced_sample_file_name.replace("FC_", "")
+ formatted_file_path = Path(sample_file.file_path.parent, replaced_sample_file_name)
+ formatted_files.append(
+ FormattedFile(original_path=sample_file.file_path, formatted_path=formatted_file_path)
+ )
+ return formatted_files
+
+
@pytest.fixture
def empty_case_files() -> list:
return []
diff --git a/tests/fixture_plugins/delivery_fixtures/delivery_services_fixtures.py b/tests/fixture_plugins/delivery_fixtures/delivery_services_fixtures.py
index 5d81346d36..60d898ed81 100644
--- a/tests/fixture_plugins/delivery_fixtures/delivery_services_fixtures.py
+++ b/tests/fixture_plugins/delivery_fixtures/delivery_services_fixtures.py
@@ -1,12 +1,14 @@
import pytest
from cg.apps.housekeeper.hk import HousekeeperAPI
+from cg.services.deliver_files.file_fetcher.analysis_raw_data_service import (
+ RawDataAndAnalysisDeliveryFileFetcher,
+)
+from cg.services.deliver_files.file_formatter.destination.base_service import BaseDeliveryFormatter
from cg.services.deliver_files.tag_fetcher.bam_service import (
BamDeliveryTagsFetcher,
)
-from cg.services.fastq_concatenation_service.fastq_concatenation_service import (
- FastqConcatenationService,
-)
+from cg.services.deliver_files.tag_fetcher.fohm_upload_service import FOHMUploadTagsFetcher
from cg.services.deliver_files.tag_fetcher.sample_and_case_service import (
SampleAndCaseDeliveryTagsFetcher,
)
@@ -16,19 +18,15 @@
from cg.services.deliver_files.file_fetcher.raw_data_service import (
RawDataDeliveryFileFetcher,
)
-from cg.services.deliver_files.file_formatter.service import (
- DeliveryFileFormatter,
-)
-from cg.services.deliver_files.file_formatter.utils.case_service import (
+from cg.services.deliver_files.file_formatter.files.case_service import (
CaseFileFormatter,
)
-from cg.services.deliver_files.file_formatter.utils.sample_concatenation_service import (
- SampleFileConcatenationFormatter,
-)
-from cg.services.deliver_files.file_formatter.utils.sample_service import (
+from cg.services.deliver_files.file_formatter.files.sample_service import (
SampleFileFormatter,
- FileManagingService,
- SampleFileNameFormatter,
+ FileManager,
+)
+from cg.services.deliver_files.file_formatter.path_name.nested_structure import (
+ NestedStructurePathFormatter,
)
from cg.store.store import Store
@@ -89,6 +87,20 @@ def bam_data_delivery_service_no_housekeeper_bundle(
)
+@pytest.fixture
+def fohm_data_delivery_service(
+ delivery_fohm_upload_housekeeper_api: HousekeeperAPI,
+ delivery_store_mutant: Store,
+) -> RawDataAndAnalysisDeliveryFileFetcher:
+ """Fixture to get an instance of FetchFastqDeliveryFilesService."""
+ tag_service = FOHMUploadTagsFetcher()
+ return RawDataAndAnalysisDeliveryFileFetcher(
+ hk_api=delivery_fohm_upload_housekeeper_api,
+ status_db=delivery_store_mutant,
+ tags_fetcher=tag_service,
+ )
+
+
@pytest.fixture
def analysis_delivery_service(
delivery_housekeeper_api: HousekeeperAPI,
@@ -118,11 +130,14 @@ def analysis_delivery_service_no_housekeeper_bundle(
@pytest.fixture
-def generic_delivery_file_formatter() -> DeliveryFileFormatter:
+def generic_delivery_file_formatter() -> BaseDeliveryFormatter:
"""Fixture to get an instance of GenericDeliveryFileFormatter."""
- return DeliveryFileFormatter(
+ return BaseDeliveryFormatter(
sample_file_formatter=SampleFileFormatter(
- file_manager=FileManagingService(), file_name_formatter=SampleFileNameFormatter()
+ file_manager=FileManager(), path_name_formatter=NestedStructurePathFormatter()
+ ),
+ case_file_formatter=CaseFileFormatter(
+ file_manager=FileManager(),
+ path_name_formatter=NestedStructurePathFormatter(),
),
- case_file_formatter=CaseFileFormatter(),
)
diff --git a/tests/fixture_plugins/delivery_fixtures/path_fixtures.py b/tests/fixture_plugins/delivery_fixtures/path_fixtures.py
index 06b77d6959..b114d0dccf 100644
--- a/tests/fixture_plugins/delivery_fixtures/path_fixtures.py
+++ b/tests/fixture_plugins/delivery_fixtures/path_fixtures.py
@@ -9,7 +9,18 @@
@pytest.fixture
def delivery_fastq_file(tmp_path: Path, sample_id: str) -> Path:
- file = Path(tmp_path, f"{sample_id}_R1_001{FileExtensions.FASTQ_GZ}")
+ file = Path(tmp_path, f"FC_{sample_id}_L001_R1_001{FileExtensions.FASTQ_GZ}")
+ file.touch()
+ return file
+
+
+@pytest.fixture
+def delivery_case_fastq_file(tmp_path: Path, sample_id: str) -> Path:
+ """
+    This represents a fastq file stored on a case bundle; Mutant stored files like this in the past.
+    This fixture serves to ensure such files are not fetched during delivery.
+ """
+ file = Path(tmp_path, f"{sample_id}_concat_{FileExtensions.FASTQ_GZ}")
file.touch()
return file
@@ -23,7 +34,18 @@ def delivery_bam_file(tmp_path: Path, sample_id: str) -> Path:
@pytest.fixture
def delivery_another_fastq_file(tmp_path: Path, another_sample_id: str) -> Path:
- file = Path(tmp_path, f"{another_sample_id}_R1_001{FileExtensions.FASTQ_GZ}")
+ file = Path(tmp_path, f"FC_{another_sample_id}L001_R1_001{FileExtensions.FASTQ_GZ}")
+ file.touch()
+ return file
+
+
+@pytest.fixture
+def delivery_another_case_fastq_file(tmp_path: Path, another_sample_id: str) -> Path:
+ """
+    This represents a fastq file stored on a case bundle; Mutant stored files like this in the past.
+    This fixture serves to ensure such files are not fetched during delivery.
+ """
+ file = Path(tmp_path, f"{another_sample_id}_concat_{FileExtensions.FASTQ_GZ}")
file.touch()
return file
@@ -73,3 +95,31 @@ def delivery_another_cram_file(tmp_path: Path, another_sample_id: str) -> Path:
@pytest.fixture
def delivery_ticket_dir_path(tmp_path: Path, ticket_id: str) -> Path:
return Path(tmp_path, ticket_id)
+
+
+@pytest.fixture
+def delivery_consensus_sample_file(tmp_path: Path, sample_id: str) -> Path:
+ file = Path(tmp_path, f"{sample_id}_consensus_sample{FileExtensions.VCF}")
+ file.touch()
+ return file
+
+
+@pytest.fixture
+def delivery_another_consensus_sample_file(tmp_path: Path, another_sample_id: str) -> Path:
+ file = Path(tmp_path, f"{another_sample_id}_consensus_sample{FileExtensions.VCF}")
+ file.touch()
+ return file
+
+
+@pytest.fixture
+def delivery_vcf_report_file(tmp_path: Path, sample_id: str) -> Path:
+ file = Path(tmp_path, f"{sample_id}_vcf_report{FileExtensions.VCF}")
+ file.touch()
+ return file
+
+
+@pytest.fixture
+def delivery_another_vcf_report_file(tmp_path: Path, another_sample_id: str) -> Path:
+ file = Path(tmp_path, f"{another_sample_id}_vcf_report{FileExtensions.VCF}")
+ file.touch()
+ return file
diff --git a/tests/fixture_plugins/delivery_report_fixtures/context_fixtures.py b/tests/fixture_plugins/delivery_report_fixtures/context_fixtures.py
index 2831f3fddf..dd6d5b06bd 100644
--- a/tests/fixture_plugins/delivery_report_fixtures/context_fixtures.py
+++ b/tests/fixture_plugins/delivery_report_fixtures/context_fixtures.py
@@ -3,7 +3,7 @@
from datetime import datetime
from pathlib import Path
-import click
+import rich_click as click
import pytest
from pytest_mock import MockFixture
diff --git a/tests/fixture_plugins/fohm/fohm_fixtures.py b/tests/fixture_plugins/fohm/fohm_fixtures.py
index 17531570e3..eef17e9f4c 100644
--- a/tests/fixture_plugins/fohm/fohm_fixtures.py
+++ b/tests/fixture_plugins/fohm/fohm_fixtures.py
@@ -109,7 +109,7 @@ def fohm_upload_api(
cg_context: CGConfig, mocker: MockFixture, helpers: StoreHelpers
) -> FOHMUploadAPI:
"""FOHM upload API fixture."""
- fohm_upload_api = FOHMUploadAPI(cg_context)
+ fohm_upload_api = FOHMUploadAPI(config=cg_context)
# Mock getting Sample object from StatusDB
mocker.patch.object(
diff --git a/tests/fixture_plugins/orders_fixtures/order_fixtures.py b/tests/fixture_plugins/orders_fixtures/order_fixtures.py
new file mode 100644
index 0000000000..3b290c0aa1
--- /dev/null
+++ b/tests/fixture_plugins/orders_fixtures/order_fixtures.py
@@ -0,0 +1,137 @@
+"""Fixtures for orders parsed into their respective models."""
+
+import pytest
+
+from cg.services.orders.validation.workflows.balsamic.models.order import BalsamicOrder
+from cg.services.orders.validation.workflows.fastq.models.order import FastqOrder
+from cg.services.orders.validation.workflows.fluffy.models.order import FluffyOrder
+from cg.services.orders.validation.workflows.metagenome.models.order import MetagenomeOrder
+from cg.services.orders.validation.workflows.microbial_fastq.models.order import MicrobialFastqOrder
+from cg.services.orders.validation.workflows.microsalt.models.order import MicrosaltOrder
+from cg.services.orders.validation.workflows.mip_dna.models.order import MipDnaOrder
+from cg.services.orders.validation.workflows.mip_rna.models.order import MipRnaOrder
+from cg.services.orders.validation.workflows.mutant.models.order import MutantOrder
+from cg.services.orders.validation.workflows.pacbio_long_read.models.order import PacbioOrder
+from cg.services.orders.validation.workflows.rml.models.order import RmlOrder
+from cg.services.orders.validation.workflows.rna_fusion.models.order import RnaFusionOrder
+from cg.services.orders.validation.workflows.taxprofiler.models.order import TaxprofilerOrder
+from cg.services.orders.validation.workflows.tomte.models.order import TomteOrder
+
+
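+# Each fixture below validates a raw order dict into its order model and sets the
+# private ticket and LIMS ids that are normally generated at submission time.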
+@pytest.fixture
+def balsamic_order(balsamic_order_to_submit: dict) -> BalsamicOrder:
+ balsamic_order_to_submit["user_id"] = 1
+ balsamic_order = BalsamicOrder.model_validate(balsamic_order_to_submit)
+ balsamic_order._generated_ticket_id = 123456
+ for case_index, sample_index, sample in balsamic_order.enumerated_new_samples:
+ sample._generated_lims_id = f"ACC{case_index}-{sample_index}"
+ return balsamic_order
+
+
+@pytest.fixture
+def fastq_order(fastq_order_to_submit: dict) -> FastqOrder:
+ fastq_order = FastqOrder.model_validate(fastq_order_to_submit)
+ fastq_order._generated_ticket_id = 123456
+ return fastq_order
+
+
+@pytest.fixture
+def fluffy_order(fluffy_order_to_submit: dict, ticket_id_as_int: int) -> FluffyOrder:
+ """Parse Fluffy order example."""
+ fluffy_order = FluffyOrder.model_validate(fluffy_order_to_submit)
+ fluffy_order._generated_ticket_id = ticket_id_as_int
+ return fluffy_order
+
+
+@pytest.fixture
+def metagenome_order(
+ metagenome_order_to_submit: dict,
+ ticket_id_as_int: int,
+) -> MetagenomeOrder:
+ """Parse metagenome order example."""
+ order = MetagenomeOrder.model_validate(metagenome_order_to_submit)
+ order._generated_ticket_id = ticket_id_as_int
+ return order
+
+
+@pytest.fixture
+def microbial_fastq_order(
+ microbial_fastq_order_to_submit: dict, ticket_id_as_int: int
+) -> MicrobialFastqOrder:
+ order = MicrobialFastqOrder.model_validate(microbial_fastq_order_to_submit)
+ order._generated_ticket_id = ticket_id_as_int
+ return order
+
+
+@pytest.fixture
+def microsalt_order(microbial_order_to_submit: dict) -> MicrosaltOrder:
+ order = MicrosaltOrder.model_validate(microbial_order_to_submit)
+ order._generated_ticket_id = 123456
+ return order
+
+
+@pytest.fixture
+def mip_dna_order(mip_dna_order_to_submit: dict) -> MipDnaOrder:
+ mip_dna_order_to_submit["user_id"] = 1
+ mip_dna_order = MipDnaOrder.model_validate(mip_dna_order_to_submit)
+ for case_index, sample_index, sample in mip_dna_order.enumerated_new_samples:
+ sample._generated_lims_id = f"ACC{case_index}-{sample_index}"
+ mip_dna_order._generated_ticket_id = 123456
+ return mip_dna_order
+
+
+@pytest.fixture
+def mip_rna_order(mip_rna_order_to_submit: dict) -> MipRnaOrder:
+ mip_rna_order_to_submit["user_id"] = 1
+ mip_rna_order = MipRnaOrder.model_validate(mip_rna_order_to_submit)
+ for case_index, sample_index, sample in mip_rna_order.enumerated_new_samples:
+ sample._generated_lims_id = f"ACC{case_index}-{sample_index}"
+ mip_rna_order._generated_ticket_id = 123456
+ return mip_rna_order
+
+
+@pytest.fixture
+def mutant_order(sarscov2_order_to_submit: dict, ticket_id_as_int: int) -> MutantOrder:
+ """Parse mutant order example."""
+ order = MutantOrder.model_validate(sarscov2_order_to_submit)
+ order._generated_ticket_id = ticket_id_as_int
+ return order
+
+
+@pytest.fixture
+def pacbio_order(pacbio_order_to_submit: dict, ticket_id_as_int: int) -> PacbioOrder:
+ order = PacbioOrder.model_validate(pacbio_order_to_submit)
+ order._generated_ticket_id = ticket_id_as_int
+ return order
+
+
+@pytest.fixture
+def rml_order(rml_order_to_submit: dict, ticket_id_as_int: int) -> RmlOrder:
+ """Parse rml order example."""
+ rml_order = RmlOrder.model_validate(rml_order_to_submit)
+ rml_order._generated_ticket_id = ticket_id_as_int
+ return rml_order
+
+
+@pytest.fixture
+def rnafusion_order(rnafusion_order_to_submit: dict) -> RnaFusionOrder:
+ """Parse RNAFusion order example."""
+ rnafusion_order = RnaFusionOrder.model_validate(rnafusion_order_to_submit)
+ rnafusion_order._generated_ticket_id = 123456
+ return rnafusion_order
+
+
+@pytest.fixture
+def taxprofiler_order(taxprofiler_order_to_submit: dict, ticket_id_as_int: int) -> TaxprofilerOrder:
+ """Parse Taxprofiler order example."""
+ taxprofiler_order = TaxprofilerOrder.model_validate(taxprofiler_order_to_submit)
+ taxprofiler_order._generated_ticket_id = ticket_id_as_int
+ return taxprofiler_order
+
+
+@pytest.fixture
+def tomte_order(tomte_order_to_submit: dict, ticket_id_as_int: int) -> TomteOrder:
+ """Parse Tomte order example."""
+ tomte_order = TomteOrder.model_validate(tomte_order_to_submit)
+ tomte_order._generated_ticket_id = ticket_id_as_int
+ return tomte_order
diff --git a/tests/fixture_plugins/orders_fixtures/order_store_service_fixtures.py b/tests/fixture_plugins/orders_fixtures/order_store_service_fixtures.py
deleted file mode 100644
index e5c188c18f..0000000000
--- a/tests/fixture_plugins/orders_fixtures/order_store_service_fixtures.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import pytest
-
-from cg.services.orders.order_lims_service.order_lims_service import OrderLimsService
-from cg.services.orders.store_order_services.store_case_order import StoreCaseOrderService
-from cg.services.orders.store_order_services.store_fastq_order_service import StoreFastqOrderService
-from cg.services.orders.store_order_services.store_metagenome_order import (
- StoreMetagenomeOrderService,
-)
-from cg.services.orders.store_order_services.store_microbial_fastq_order_service import (
- StoreMicrobialFastqOrderService,
-)
-from cg.services.orders.store_order_services.store_microbial_order import StoreMicrobialOrderService
-from cg.services.orders.store_order_services.store_pacbio_order_service import (
- StorePacBioOrderService,
-)
-from cg.services.orders.store_order_services.store_pool_order import StorePoolOrderService
-from cg.store.store import Store
-from tests.mocks.limsmock import MockLimsAPI
-
-
-@pytest.fixture
-def store_generic_order_service(base_store: Store, lims_api: MockLimsAPI) -> StoreCaseOrderService:
- return StoreCaseOrderService(status_db=base_store, lims_service=OrderLimsService(lims_api))
-
-
-@pytest.fixture
-def store_pool_order_service(base_store: Store, lims_api: MockLimsAPI) -> StorePoolOrderService:
- return StorePoolOrderService(status_db=base_store, lims_service=OrderLimsService(lims_api))
-
-
-@pytest.fixture
-def store_fastq_order_service(base_store: Store, lims_api: MockLimsAPI) -> StoreFastqOrderService:
- return StoreFastqOrderService(status_db=base_store, lims_service=OrderLimsService(lims_api))
-
-
-@pytest.fixture
-def store_pacbio_order_service(base_store: Store, lims_api: MockLimsAPI) -> StorePacBioOrderService:
- return StorePacBioOrderService(status_db=base_store, lims_service=OrderLimsService(lims_api))
-
-
-@pytest.fixture
-def store_metagenome_order_service(
- base_store: Store, lims_api: MockLimsAPI
-) -> StoreMetagenomeOrderService:
- return StoreMetagenomeOrderService(
- status_db=base_store, lims_service=OrderLimsService(lims_api)
- )
-
-
-@pytest.fixture
-def store_microbial_order_service(
- base_store: Store, lims_api: MockLimsAPI
-) -> StoreMicrobialOrderService:
- return StoreMicrobialOrderService(status_db=base_store, lims_service=OrderLimsService(lims_api))
-
-
-@pytest.fixture
-def store_microbial_fastq_order_service(
- base_store: Store, lims_api: MockLimsAPI
-) -> StoreMicrobialFastqOrderService:
- return StoreMicrobialFastqOrderService(
- status_db=base_store, lims_service=OrderLimsService(lims_api)
- )
diff --git a/tests/fixture_plugins/orders_fixtures/order_to_submit_fixtures.py b/tests/fixture_plugins/orders_fixtures/order_to_submit_fixtures.py
index 4d71b98234..69eb36aeee 100644
--- a/tests/fixture_plugins/orders_fixtures/order_to_submit_fixtures.py
+++ b/tests/fixture_plugins/orders_fixtures/order_to_submit_fixtures.py
@@ -1,16 +1,18 @@
+"""Fixtures for orders parsed from JSON files into dictionaries."""
+
from pathlib import Path
import pytest
from cg.constants.constants import FileFormat
from cg.io.controller import ReadFile
-from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
+
+# Valid orders
@pytest.fixture(scope="session")
def balsamic_order_to_submit(cgweb_orders_dir: Path) -> dict:
- """Load an example cancer order."""
+ """Load an example Balsamic order."""
return ReadFile.get_content_from_file(
file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "balsamic.json")
)
@@ -18,39 +20,47 @@ def balsamic_order_to_submit(cgweb_orders_dir: Path) -> dict:
@pytest.fixture(scope="session")
def fastq_order_to_submit(cgweb_orders_dir) -> dict:
- """Load an example FASTQ order."""
+ """Load an example Fastq order."""
return ReadFile.get_content_from_file(
file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "fastq.json")
)
@pytest.fixture(scope="session")
-def metagenome_order_to_submit(cgweb_orders_dir: Path) -> dict:
- """Load an example metagenome order."""
+def fluffy_order_to_submit(cgweb_orders_dir: Path) -> dict:
+ """Load an example Fluffy order."""
return ReadFile.get_content_from_file(
- file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "metagenome.json")
+ file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "fluffy.json")
)
@pytest.fixture(scope="session")
-def microbial_order_to_submit(cgweb_orders_dir: Path) -> dict:
- """Load an example microbial order."""
+def metagenome_order_to_submit(cgweb_orders_dir: Path) -> dict:
+ """Load an example Metagenome order."""
return ReadFile.get_content_from_file(
- file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "microsalt.json")
+ file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "metagenome.json")
)
-@pytest.fixture
+@pytest.fixture(scope="session")
def microbial_fastq_order_to_submit(cgweb_orders_dir: Path) -> dict:
- """Load an example microbial order."""
+ """Load an example Microbial fastq order."""
return ReadFile.get_content_from_file(
file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "microbial_fastq.json")
)
@pytest.fixture(scope="session")
-def mip_order_to_submit(cgweb_orders_dir: Path) -> dict:
- """Load an example MIP order."""
+def microbial_order_to_submit(cgweb_orders_dir: Path) -> dict:
+ """Load an example Microsalt order."""
+ return ReadFile.get_content_from_file(
+ file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "microsalt.json")
+ )
+
+
+@pytest.fixture(scope="session")
+def mip_dna_order_to_submit(cgweb_orders_dir: Path) -> dict:
+ """Load an example MIP-DNA order."""
return ReadFile.get_content_from_file(
file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "mip.json")
)
@@ -58,7 +68,7 @@ def mip_order_to_submit(cgweb_orders_dir: Path) -> dict:
@pytest.fixture(scope="session")
def mip_rna_order_to_submit(cgweb_orders_dir: Path) -> dict:
- """Load an example RNA order."""
+ """Load an example MIP-RNA order."""
return ReadFile.get_content_from_file(
file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "mip_rna.json")
)
@@ -73,71 +83,51 @@ def pacbio_order_to_submit(cgweb_orders_dir) -> dict:
@pytest.fixture(scope="session")
-def rnafusion_order_to_submit(cgweb_orders_dir: Path) -> dict:
- """Load an example RNA order."""
+def rml_order_to_submit(cgweb_orders_dir: Path) -> dict:
+ """Load an example RML order."""
return ReadFile.get_content_from_file(
- file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "rnafusion.json")
+ file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "rml.json")
)
@pytest.fixture(scope="session")
-def rml_order_to_submit(cgweb_orders_dir: Path) -> dict:
- """Load an example RML order."""
+def rnafusion_order_to_submit(cgweb_orders_dir: Path) -> dict:
+ """Load an example RNA Fusion order."""
return ReadFile.get_content_from_file(
- file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "rml.json")
+ file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "rnafusion.json")
)
-@pytest.fixture(scope="session")
+@pytest.fixture
def sarscov2_order_to_submit(cgweb_orders_dir: Path) -> dict:
- """Load an example sarscov2 order."""
+ """Load an example Mutant order."""
return ReadFile.get_content_from_file(
file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "sarscov2.json")
)
+@pytest.fixture(scope="session")
+def taxprofiler_order_to_submit(cgweb_orders_dir: Path) -> dict:
+ """Load an example Taxprofiler order."""
+ return ReadFile.get_content_from_file(
+ file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "taxprofiler.json")
+ )
+
+
@pytest.fixture(scope="session")
def tomte_order_to_submit(cgweb_orders_dir: Path) -> dict:
- """Load an example TOMTE order."""
+ """Load an example Tomte order."""
return ReadFile.get_content_from_file(
file_format=FileFormat.JSON, file_path=Path(cgweb_orders_dir, "tomte.json")
)
+# Invalid orders
+
+
@pytest.fixture(scope="session")
-def all_orders_to_submit(
- balsamic_order_to_submit: dict,
- fastq_order_to_submit: dict,
- metagenome_order_to_submit: dict,
- microbial_order_to_submit: dict,
- mip_order_to_submit: dict,
- mip_rna_order_to_submit: dict,
- pacbio_order_to_submit: dict,
- rml_order_to_submit: dict,
- rnafusion_order_to_submit: dict,
- sarscov2_order_to_submit: dict,
-) -> dict[str, OrderIn]:
- """Returns a dict of parsed order for each order type."""
- return {
- OrderType.BALSAMIC: OrderIn.parse_obj(balsamic_order_to_submit, project=OrderType.BALSAMIC),
- OrderType.FASTQ: OrderIn.parse_obj(fastq_order_to_submit, project=OrderType.FASTQ),
- OrderType.FLUFFY: OrderIn.parse_obj(rml_order_to_submit, project=OrderType.FLUFFY),
- OrderType.METAGENOME: OrderIn.parse_obj(
- metagenome_order_to_submit, project=OrderType.METAGENOME
- ),
- OrderType.MICROSALT: OrderIn.parse_obj(
- microbial_order_to_submit, project=OrderType.MICROSALT
- ),
- OrderType.MIP_DNA: OrderIn.parse_obj(mip_order_to_submit, project=OrderType.MIP_DNA),
- OrderType.MIP_RNA: OrderIn.parse_obj(mip_rna_order_to_submit, project=OrderType.MIP_RNA),
- OrderType.PACBIO_LONG_READ: OrderIn.parse_obj(
- pacbio_order_to_submit, project=OrderType.PACBIO_LONG_READ
- ),
- OrderType.RML: OrderIn.parse_obj(rml_order_to_submit, project=OrderType.RML),
- OrderType.RNAFUSION: OrderIn.parse_obj(
- rnafusion_order_to_submit, project=OrderType.RNAFUSION
- ),
- OrderType.SARS_COV_2: OrderIn.parse_obj(
- sarscov2_order_to_submit, project=OrderType.SARS_COV_2
- ),
- }
+def invalid_balsamic_order_to_submit(invalid_cgweb_orders_dir: Path) -> dict:
+ """Load an invalid example Balsamic order."""
+ return ReadFile.get_content_from_file(
+ file_format=FileFormat.JSON, file_path=Path(invalid_cgweb_orders_dir, "balsamic_FAIL.json")
+ )
diff --git a/tests/fixture_plugins/orders_fixtures/path_fixtures.py b/tests/fixture_plugins/orders_fixtures/path_fixtures.py
new file mode 100644
index 0000000000..74eea596cf
--- /dev/null
+++ b/tests/fixture_plugins/orders_fixtures/path_fixtures.py
@@ -0,0 +1,15 @@
+from pathlib import Path
+
+import pytest
+
+
+@pytest.fixture(scope="session")
+def cgweb_orders_dir(fixtures_dir: Path) -> Path:
+ """Return the path to the cgweb_orders dir."""
+ return Path(fixtures_dir, "cgweb_orders")
+
+
+@pytest.fixture(scope="session")
+def invalid_cgweb_orders_dir(fixtures_dir: Path) -> Path:
+ """Return the path to the invalid_cgweb_orders dir."""
+ return Path(fixtures_dir, "invalid_cgweb_orders")
diff --git a/tests/fixture_plugins/orders_fixtures/services_fixtures.py b/tests/fixture_plugins/orders_fixtures/services_fixtures.py
new file mode 100644
index 0000000000..ea0823371f
--- /dev/null
+++ b/tests/fixture_plugins/orders_fixtures/services_fixtures.py
@@ -0,0 +1,55 @@
+import pytest
+
+from cg.clients.freshdesk.freshdesk_client import FreshdeskClient
+from cg.services.orders.storing.service_registry import (
+ StoringServiceRegistry,
+ setup_storing_service_registry,
+)
+from cg.services.orders.submitter.service import OrderSubmitter
+from cg.services.orders.submitter.ticket_handler import TicketHandler
+from cg.services.orders.validation.model_validator.model_validator import ModelValidator
+from cg.services.orders.validation.service import OrderValidationService
+from cg.store.store import Store
+from tests.mocks.limsmock import MockLimsAPI
+
+
+@pytest.fixture
+def freshdesk_client() -> FreshdeskClient:
+ return FreshdeskClient(base_url="https://mock.freshdesk.com", api_key="mock_api_key")
+
+
+@pytest.fixture
+def model_validator() -> ModelValidator:
+ return ModelValidator()
+
+
+@pytest.fixture
+def order_validation_service(store_to_submit_and_validate_orders: Store) -> OrderValidationService:
+ return OrderValidationService(store_to_submit_and_validate_orders)
+
+
+@pytest.fixture(scope="function")
+def order_submitter(
+ ticket_handler: TicketHandler,
+ storing_service_registry: StoringServiceRegistry,
+ order_validation_service: OrderValidationService,
+) -> OrderSubmitter:
+ return OrderSubmitter(
+ ticket_handler=ticket_handler,
+ storing_registry=storing_service_registry,
+ validation_service=order_validation_service,
+ )
+
+
+@pytest.fixture
+def storing_service_registry(
+ store_to_submit_and_validate_orders: Store, lims_api: MockLimsAPI
+) -> StoringServiceRegistry:
+ return setup_storing_service_registry(
+ lims=lims_api, status_db=store_to_submit_and_validate_orders
+ )
+
+
+@pytest.fixture
+def ticket_handler(store: Store, freshdesk_client: FreshdeskClient) -> TicketHandler:
+ return TicketHandler(db=store, client=freshdesk_client, system_email_id=12345, env="production")
diff --git a/tests/fixture_plugins/orders_fixtures/status_data_fixtures.py b/tests/fixture_plugins/orders_fixtures/status_data_fixtures.py
deleted file mode 100644
index adff0bc761..0000000000
--- a/tests/fixture_plugins/orders_fixtures/status_data_fixtures.py
+++ /dev/null
@@ -1,119 +0,0 @@
-import pytest
-
-from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
-from cg.services.orders.store_order_services.store_case_order import StoreCaseOrderService
-from cg.services.orders.store_order_services.store_fastq_order_service import StoreFastqOrderService
-from cg.services.orders.store_order_services.store_metagenome_order import (
- StoreMetagenomeOrderService,
-)
-from cg.services.orders.store_order_services.store_microbial_fastq_order_service import (
- StoreMicrobialFastqOrderService,
-)
-from cg.services.orders.store_order_services.store_microbial_order import StoreMicrobialOrderService
-from cg.services.orders.store_order_services.store_pacbio_order_service import (
- StorePacBioOrderService,
-)
-from cg.services.orders.store_order_services.store_pool_order import StorePoolOrderService
-
-
-@pytest.fixture
-def balsamic_status_data(
- balsamic_order_to_submit: dict, store_generic_order_service: StoreCaseOrderService
-) -> dict:
- """Parse balsamic order example."""
- project: OrderType = OrderType.BALSAMIC
- order: OrderIn = OrderIn.parse_obj(obj=balsamic_order_to_submit, project=project)
- return store_generic_order_service.order_to_status(order=order)
-
-
-@pytest.fixture
-def fastq_status_data(
- fastq_order_to_submit, store_fastq_order_service: StoreFastqOrderService
-) -> dict:
- """Parse fastq order example."""
- project: OrderType = OrderType.FASTQ
- order: OrderIn = OrderIn.parse_obj(obj=fastq_order_to_submit, project=project)
- return store_fastq_order_service.order_to_status(order=order)
-
-
-@pytest.fixture
-def pacbio_status_data(
- pacbio_order_to_submit: dict, store_pacbio_order_service: StorePacBioOrderService
-) -> dict:
- """Parse pacbio order example."""
- project: OrderType = OrderType.PACBIO_LONG_READ
- order: OrderIn = OrderIn.parse_obj(obj=pacbio_order_to_submit, project=project)
- return store_pacbio_order_service.order_to_status(order=order)
-
-
-@pytest.fixture
-def metagenome_status_data(
- metagenome_order_to_submit: dict, store_metagenome_order_service: StoreMetagenomeOrderService
-) -> dict:
- """Parse metagenome order example."""
- project: OrderType = OrderType.METAGENOME
- order: OrderIn = OrderIn.parse_obj(obj=metagenome_order_to_submit, project=project)
-
- return store_metagenome_order_service.order_to_status(order=order)
-
-
-@pytest.fixture
-def microbial_status_data(
- microbial_order_to_submit: dict, store_microbial_order_service: StoreMicrobialOrderService
-) -> dict:
- """Parse microbial order example."""
- project: OrderType = OrderType.MICROSALT
- order: OrderIn = OrderIn.parse_obj(obj=microbial_order_to_submit, project=project)
- return store_microbial_order_service.order_to_status(order=order)
-
-
-@pytest.fixture
-def microbial_fastq_status_data(
- microbial_fastq_order_to_submit: dict,
- store_microbial_fastq_order_service: StoreMicrobialFastqOrderService,
-) -> dict:
- """Parse microbial order example."""
- project: OrderType = OrderType.MICROBIAL_FASTQ
- order: OrderIn = OrderIn.parse_obj(obj=microbial_fastq_order_to_submit, project=project)
- return store_microbial_fastq_order_service.order_to_status(order=order)
-
-
-@pytest.fixture
-def mip_rna_status_data(
- mip_rna_order_to_submit: dict, store_generic_order_service: StoreCaseOrderService
-) -> dict:
- """Parse rna order example."""
- project: OrderType = OrderType.MIP_RNA
- order: OrderIn = OrderIn.parse_obj(obj=mip_rna_order_to_submit, project=project)
- return store_generic_order_service.order_to_status(order=order)
-
-
-@pytest.fixture
-def mip_status_data(
- mip_order_to_submit: dict, store_generic_order_service: StoreCaseOrderService
-) -> dict:
- """Parse scout order example."""
- project: OrderType = OrderType.MIP_DNA
- order: OrderIn = OrderIn.parse_obj(obj=mip_order_to_submit, project=project)
- return store_generic_order_service.order_to_status(order=order)
-
-
-@pytest.fixture
-def rml_status_data(
- rml_order_to_submit: dict, store_pool_order_service: StorePoolOrderService
-) -> dict:
- """Parse rml order example."""
- project: OrderType = OrderType.RML
- order: OrderIn = OrderIn.parse_obj(obj=rml_order_to_submit, project=project)
- return store_pool_order_service.order_to_status(order=order)
-
-
-@pytest.fixture
-def tomte_status_data(
- tomte_order_to_submit: dict, store_generic_order_service: StoreCaseOrderService
-) -> dict:
- """Parse TOMTE order example."""
- project: OrderType = OrderType.TOMTE
- order: OrderIn = OrderIn.parse_obj(obj=tomte_order_to_submit, project=project)
- return store_generic_order_service.order_to_status(order=order)
diff --git a/tests/fixture_plugins/orders_fixtures/store_fixtures.py b/tests/fixture_plugins/orders_fixtures/store_fixtures.py
new file mode 100644
index 0000000000..e5f86d461e
--- /dev/null
+++ b/tests/fixture_plugins/orders_fixtures/store_fixtures.py
@@ -0,0 +1,50 @@
+"""Store fixtures for the order services tests."""
+
+import pytest
+
+from cg.models.orders.constants import OrderType
+from cg.services.orders.storing.constants import MAF_ORDER_ID
+from cg.store.models import ApplicationVersion, Customer, Order
+from cg.store.store import Store
+from tests.store_helpers import StoreHelpers
+
+
+@pytest.fixture
+def store_to_submit_and_validate_orders(
+ store: Store, helpers: StoreHelpers, customer_id: str
+) -> Store:
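+    """Return a store populated with applications, a customer, user, panels and the MAF order."""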
+ app_tags: dict[str, list[OrderType]] = {
+ "PANKTTR100": [OrderType.BALSAMIC],
+ "WGSPCFC030": [OrderType.FASTQ, OrderType.MIP_DNA],
+ "RMLP15R100": [OrderType.FLUFFY, OrderType.RML],
+ "RMLP15R200": [OrderType.FLUFFY, OrderType.RML],
+ "RMLP15R400": [OrderType.FLUFFY, OrderType.RML],
+ "RMLP15R500": [OrderType.FLUFFY, OrderType.RML],
+ "METPCFR030": [OrderType.METAGENOME],
+ "METWPFR030": [OrderType.METAGENOME, OrderType.TAXPROFILER],
+ "MWRNXTR003": [OrderType.MICROBIAL_FASTQ, OrderType.MICROSALT],
+ "MWXNXTR003": [OrderType.MICROSALT],
+ "VWGNXTR001": [OrderType.MICROSALT],
+ "WGSWPFC030": [OrderType.MIP_DNA],
+ "RNAPOAR025": [OrderType.MIP_RNA, OrderType.RNAFUSION, OrderType.TOMTE],
+ "LWPBELB070": [OrderType.PACBIO_LONG_READ],
+ "VWGDPTR001": [OrderType.SARS_COV_2],
+ }
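+    # Link every application to the order types it may be used with.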
+    for tag, order_types in app_tags.items():
+        application_version: ApplicationVersion = helpers.ensure_application_version(
+            store=store, application_tag=tag
+        )
+        application_version.application.order_types = order_types
+ customer: Customer = helpers.ensure_customer(store=store, customer_id=customer_id)
+ helpers.ensure_user(store=store, customer=customer)
+ helpers.ensure_panel(store=store, panel_abbreviation="AID")
+ helpers.ensure_panel(store=store, panel_abbreviation="Ataxi")
+ helpers.ensure_panel(store=store, panel_abbreviation="IEM")
+ helpers.ensure_panel(store=store, panel_abbreviation="OMIM-AUTO")
+    order: Order = Order(customer_id=1, id=MAF_ORDER_ID, ticket_id=100000000)
+ store.add_item_to_store(order)
+ store.commit_to_store()
+ return store
diff --git a/tests/fixture_plugins/orders_fixtures/store_service_fixtures.py b/tests/fixture_plugins/orders_fixtures/store_service_fixtures.py
new file mode 100644
index 0000000000..9a7b86c9aa
--- /dev/null
+++ b/tests/fixture_plugins/orders_fixtures/store_service_fixtures.py
@@ -0,0 +1,85 @@
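+"""Fixtures instantiating the order storing services against the populated store and a mock LIMS API."""
+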
+import pytest
+
+from cg.services.orders.lims_service.service import OrderLimsService
+from cg.services.orders.storing.implementations.case_order_service import StoreCaseOrderService
+from cg.services.orders.storing.implementations.fastq_order_service import StoreFastqOrderService
+from cg.services.orders.storing.implementations.metagenome_order_service import (
+ StoreMetagenomeOrderService,
+)
+from cg.services.orders.storing.implementations.microbial_fastq_order_service import (
+ StoreMicrobialFastqOrderService,
+)
+from cg.services.orders.storing.implementations.microbial_order_service import (
+ StoreMicrobialOrderService,
+)
+from cg.services.orders.storing.implementations.pacbio_order_service import StorePacBioOrderService
+from cg.services.orders.storing.implementations.pool_order_service import StorePoolOrderService
+from cg.store.store import Store
+from tests.mocks.limsmock import MockLimsAPI
+
+
+@pytest.fixture
+def store_generic_order_service(
+ store_to_submit_and_validate_orders: Store, lims_api: MockLimsAPI
+) -> StoreCaseOrderService:
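+    # All storing service fixtures follow the same pattern: the populated store plus an OrderLimsService wrapping the mock LIMS API.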
+ return StoreCaseOrderService(
+ status_db=store_to_submit_and_validate_orders, lims_service=OrderLimsService(lims_api)
+ )
+
+
+@pytest.fixture
+def store_pool_order_service(
+ store_to_submit_and_validate_orders: Store, lims_api: MockLimsAPI
+) -> StorePoolOrderService:
+ return StorePoolOrderService(
+ status_db=store_to_submit_and_validate_orders, lims_service=OrderLimsService(lims_api)
+ )
+
+
+@pytest.fixture
+def store_fastq_order_service(
+ store_to_submit_and_validate_orders: Store, lims_api: MockLimsAPI
+) -> StoreFastqOrderService:
+ return StoreFastqOrderService(
+ status_db=store_to_submit_and_validate_orders, lims_service=OrderLimsService(lims_api)
+ )
+
+
+@pytest.fixture
+def store_pacbio_order_service(
+ store_to_submit_and_validate_orders: Store, lims_api: MockLimsAPI
+) -> StorePacBioOrderService:
+ return StorePacBioOrderService(
+ status_db=store_to_submit_and_validate_orders, lims_service=OrderLimsService(lims_api)
+ )
+
+
+@pytest.fixture
+def store_metagenome_order_service(
+ store_to_submit_and_validate_orders: Store, lims_api: MockLimsAPI
+) -> StoreMetagenomeOrderService:
+ return StoreMetagenomeOrderService(
+ status_db=store_to_submit_and_validate_orders, lims_service=OrderLimsService(lims_api)
+ )
+
+
+@pytest.fixture
+def store_microbial_order_service(
+ store_to_submit_and_validate_orders: Store,
+ lims_api: MockLimsAPI,
+) -> StoreMicrobialOrderService:
+ return StoreMicrobialOrderService(
+ status_db=store_to_submit_and_validate_orders, lims_service=OrderLimsService(lims_api)
+ )
+
+
+@pytest.fixture
+def store_microbial_fastq_order_service(
+ store_to_submit_and_validate_orders: Store, lims_api: MockLimsAPI
+) -> StoreMicrobialFastqOrderService:
+ return StoreMicrobialFastqOrderService(
+ status_db=store_to_submit_and_validate_orders, lims_service=OrderLimsService(lims_api)
+ )
diff --git a/tests/fixtures/cgweb_orders/balsamic.json b/tests/fixtures/cgweb_orders/balsamic.json
index 17179dde03..342f1798ac 100644
--- a/tests/fixtures/cgweb_orders/balsamic.json
+++ b/tests/fixtures/cgweb_orders/balsamic.json
@@ -1,43 +1,61 @@
{
- "name": "#123456",
- "customer": "cust000",
- "comment": "",
- "samples": [
- {
- "age_at_sampling": "17.18192",
- "application": "WGSPCFC030",
- "capture_kit": "other",
- "cohorts": [
- ""
- ],
- "comment": "other Elution buffer",
- "container": "96 well plate",
- "concentration_ng_ul": "18",
- "container_name": "p1",
- "data_analysis": "balsamic",
- "data_delivery": "fastq-analysis-scout",
- "elution_buffer": "Other (specify in 'Comments')",
- "family_name": "family1",
- "formalin_fixation_time": "1",
- "name": "s1",
- "phenotype_groups": [
- ""
- ],
- "phenotype_terms": [
- ""
- ],
- "post_formalin_fixation_time": "2",
- "priority": "standard",
- "quantity": "2",
- "sex": "male",
- "source": "blood",
- "subject_id": "subject1",
- "synopsis": "",
- "tissue_block_size": "small",
- "tumour": true,
- "tumour_purity": "75",
- "volume": "1",
- "well_position": "A:1"
- }
- ]
-}
+ "cases": [
+ {
+ "cohorts": null,
+ "name": "BalsamicCase",
+ "panels": null,
+ "priority": "standard",
+ "samples": [
+ {
+ "age_at_sampling": "17.2",
+ "application": "PANKTTR100",
+ "capture_kit": "GMCKsolid",
+ "comment": "This is a sample comment",
+ "concentration_ng_ul": null,
+ "container": "96 well plate",
+ "container_name": "BalsamicPlate",
+ "control": null,
+ "data_analysis": null,
+ "data_delivery": null,
+ "elution_buffer": "Tris-HCl",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": "15",
+ "mother": null,
+ "name": "BalsamicSample",
+ "phenotype_groups": [
+ "PhGroup"
+ ],
+ "phenotype_terms": [
+ "PhTerm"
+ ],
+ "post_formalin_fixation_time": "3",
+ "priority": null,
+ "quantity": null,
+ "reference_genome": null,
+ "require_qc_ok": false,
+ "sex": "male",
+ "source": "cytology (FFPE)",
+ "source_comment": null,
+ "status": null,
+ "subject_id": "Subject1",
+ "tissue_block_size": "large",
+ "tumour": true,
+ "tumour_purity": "13",
+ "volume": 42,
+ "well_position": "A:1"
+ }
+ ],
+ "synopsis": "A synopsis"
+ }
+ ],
+ "comment": null,
+ "customer": "cust000",
+ "data_analysis": "balsamic",
+ "data_delivery": null,
+ "delivery_type": "analysis-scout",
+ "name": "BalsamicOrder",
+ "project_type": "balsamic",
+ "ticket": null,
+ "user_id": 1
+}
\ No newline at end of file
diff --git a/tests/fixtures/cgweb_orders/fastq.json b/tests/fixtures/cgweb_orders/fastq.json
index e23dce1559..64ad2a93a3 100644
--- a/tests/fixtures/cgweb_orders/fastq.json
+++ b/tests/fixtures/cgweb_orders/fastq.json
@@ -1,39 +1,129 @@
{
- "name": "Fastq order",
- "customer": "cust002",
- "comment": "",
- "samples": [
- {
- "application": "WGSPCFC060",
- "comment": "",
- "container": "Tube",
- "container_name": "prov1",
- "data_analysis": "raw-data",
- "data_delivery": "fastq",
- "elution_buffer": "Nuclease-free water",
- "name": "prov1",
- "priority": "priority",
- "sex": "male",
- "source": "blood",
- "tumour": false,
- "volume": "1",
- "well_position": ""
- },
- {
- "application": "WGSPCFC060",
- "comment": "",
- "container": "Tube",
- "container_name": "prov2",
- "data_analysis": "raw-data",
- "data_delivery": "fastq",
- "elution_buffer": "Nuclease-free water",
- "name": "prov2",
- "priority": "priority",
- "sex": "male",
- "source": "cell line",
- "tumour": true,
- "volume": "2",
- "well_position": ""
- }
- ]
-}
+ "comment": "TestComment",
+ "customer": "cust000",
+ "data_delivery": null,
+ "delivery_type": "fastq",
+ "name": "FastqOrder",
+ "project_type": "fastq",
+ "samples": [
+ {
+ "age_at_sampling": null,
+ "application": "WGSPCFC030",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "This is a test comment",
+ "concentration": null,
+ "concentration_ng_ul": 65,
+ "concentration_sample": null,
+ "container": "96 well plate",
+ "container_name": "Plate1",
+ "control": null,
+ "custom_index": null,
+ "data_analysis": null,
+ "data_delivery": null,
+ "elution_buffer": "Nuclease-free water",
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "Sample1",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "standard",
+ "quantity": "15",
+ "reagent_label": null,
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": null,
+ "selection_criteria": null,
+ "sex": "male",
+ "source": "blood",
+ "source_comment": null,
+ "status": null,
+ "subject_id": "subject1",
+ "tissue_block_size": null,
+ "tumour": false,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": 54,
+ "well_position": "A:1",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "WGSPCFC030",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "This is another test comment",
+ "concentration": null,
+ "concentration_ng_ul": 33,
+ "concentration_sample": null,
+ "container": "96 well plate",
+ "container_name": "Plate1",
+ "control": null,
+ "custom_index": null,
+ "data_analysis": null,
+ "data_delivery": null,
+ "elution_buffer": "Nuclease-free water",
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "Sample2",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "standard",
+ "quantity": "15",
+ "reagent_label": null,
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": true,
+ "rml_plate_name": null,
+ "selection_criteria": null,
+ "sex": "female",
+ "source": "blood",
+ "source_comment": null,
+ "status": null,
+ "subject_id": "subject2",
+ "tissue_block_size": null,
+ "tumour": true,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": 54,
+ "well_position": "B:1",
+ "well_position_rml": null
+ }
+ ],
+ "user_id": 1
+}
\ No newline at end of file
diff --git a/tests/fixtures/cgweb_orders/fluffy.json b/tests/fixtures/cgweb_orders/fluffy.json
new file mode 100644
index 0000000000..091f9796a7
--- /dev/null
+++ b/tests/fixtures/cgweb_orders/fluffy.json
@@ -0,0 +1,249 @@
+{
+ "cases": [],
+ "comment": null,
+ "customer": "cust000",
+ "data_delivery": null,
+ "delivery_type": "statina",
+ "name": "1604.19.rml",
+ "project_type": "fluffy",
+ "samples": [
+ {
+ "age_at_sampling": null,
+ "application": "RMLP15R100",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "comment",
+ "concentration": "2",
+ "concentration_ng_ul": null,
+ "concentration_sample": "4",
+ "container": null,
+ "container_name": null,
+ "control": "positive",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "FLUFFY",
+ "data_delivery": "statina",
+ "elution_buffer": null,
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": "IDT DupSeq 10 bp Set B",
+ "index_number": "3",
+ "index_sequence": "",
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "fluffysample1",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": "pool1",
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "research",
+ "quantity": null,
+ "reagent_label": "C01 IDT_10nt_568 (TGTGAGCGAA-AACTCCGATC)",
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": "plate1",
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "20",
+ "well_position": null,
+ "well_position_rml": "A:1"
+ },
+ {
+ "age_at_sampling": null,
+ "application": "RMLP15R200",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "",
+ "concentration": "2",
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": null,
+ "container_name": null,
+ "control": "negative",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "FLUFFY",
+ "data_delivery": "statina",
+ "elution_buffer": null,
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": "TruSeq DNA HT Dual-index (D7-D5)",
+ "index_number": "3",
+ "index_sequence": "C01 - D701-D503 (ATTACTCG-CCTATCCT)",
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "fluffysample2",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": "pool2",
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "clinical_trials",
+ "quantity": null,
+ "reagent_label": "C01 - D701-D503 (ATTACTCG-CCTATCCT)",
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": "",
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "21",
+ "well_position": null,
+ "well_position_rml": ""
+ },
+ {
+ "age_at_sampling": null,
+ "application": "RMLP15R400",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "",
+ "concentration": "2",
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": null,
+ "container_name": null,
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "FLUFFY",
+ "data_delivery": "statina",
+ "elution_buffer": null,
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": "NEXTflex® v2 UDI Barcodes 1 - 96",
+ "index_number": "3",
+ "index_sequence": "UDI 3 (CGCTGCTC-GGCAGATC)",
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "fluffysample3",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": "pool3",
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "standard",
+ "quantity": null,
+ "reagent_label": "UDI3 (CGCTGCTC-GGCAGATC)",
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": "",
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "22",
+ "well_position": null,
+ "well_position_rml": ""
+ },
+ {
+ "age_at_sampling": null,
+ "application": "RMLP15R500",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "",
+ "concentration": "2",
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": null,
+ "container_name": null,
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "FLUFFY",
+ "data_delivery": "statina",
+ "elution_buffer": null,
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": "NEXTflex® v2 UDI Barcodes 1 - 96",
+ "index_number": "3",
+ "index_sequence": "UDI 3 (CGCTGCTC-GGCAGATC)",
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "fluffysample4",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": "pool4",
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "priority",
+ "quantity": null,
+ "reagent_label": "UDI 3 (CGCTGCTC-GGCAGATC)",
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": "",
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "23",
+ "well_position": null,
+ "well_position_rml": "A:1"
+ }
+ ],
+ "ticket": null,
+ "user_id": 1
+}
\ No newline at end of file
diff --git a/tests/fixtures/cgweb_orders/metagenome.json b/tests/fixtures/cgweb_orders/metagenome.json
index a29bb7ff98..d5724c0b68 100644
--- a/tests/fixtures/cgweb_orders/metagenome.json
+++ b/tests/fixtures/cgweb_orders/metagenome.json
@@ -1,43 +1,43 @@
{
- "name": "Metagenome",
- "customer": "cust000",
- "comment": "",
- "samples": [
- {
- "name": "Bristol",
- "container": "96 well plate",
- "application": "METLIFR020",
- "data_analysis": "raw-data",
- "data_delivery": "fastq",
- "require_qc_ok": false,
- "elution_buffer": "Tris-HCl",
- "source": "faeces",
- "priority": "standard",
- "container_name": "Platen",
- "well_position": "A:1",
- "concentration_sample": "2",
- "quantity": "10",
- "extraction_method": "best",
- "volume": "1",
- "comment": "5 on the chart"
- },
- {
- "name": "Trefyrasex",
- "container": "Tube",
- "application": "METNXTR020",
- "data_analysis": "raw-data",
- "data_delivery": "fastq",
- "require_qc_ok": true,
- "elution_buffer": "Nuclease-free water",
- "source": "blood",
- "priority": "priority",
- "container_name": "Tuben",
- "well_position": "",
- "concentration_sample": "1",
- "quantity": "2",
- "extraction_method": "unknown",
- "volume": "2",
- "comment": "test"
- }
- ]
-}
+ "comment": null,
+ "customer": "cust000",
+ "data_delivery": "fastq",
+ "dataAnalysis": "raw-data",
+ "delivery_type": "fastq",
+ "name": "Metagenome",
+ "project_type": "metagenome",
+ "samples": [
+ {
+ "application": "METPCFR030",
+ "comment": "5 on the chart",
+ "container": "96 well plate",
+ "container_name": "Platen",
+ "elution_buffer": "Tris-HCl",
+ "name": "Bristol",
+ "organism": null,
+ "priority": "standard",
+ "processedAt": "2025-01-03T09:00:27.876Z",
+ "quantity": "10",
+ "require_qc_ok": false,
+ "source": "blood",
+ "volume": 20,
+ "well_position": "A:1"
+ },
+ {
+ "application": "METWPFR030",
+ "comment": "test",
+ "container": "Tube",
+ "container_name": "Tuben",
+ "elution_buffer": "Nuclease-free water",
+ "name": "Trefyrasex",
+ "organism": null,
+ "priority": "priority",
+ "processedAt": "2025-01-03T09:00:27.876Z",
+ "quantity": "2",
+ "require_qc_ok": true,
+ "source": "buccal swab",
+ "volume": 21
+ }
+ ],
+ "user_id": 1
+}
\ No newline at end of file
diff --git a/tests/fixtures/cgweb_orders/microbial_fastq.json b/tests/fixtures/cgweb_orders/microbial_fastq.json
index 57497dcf02..e85d9116da 100644
--- a/tests/fixtures/cgweb_orders/microbial_fastq.json
+++ b/tests/fixtures/cgweb_orders/microbial_fastq.json
@@ -1,33 +1,36 @@
{
"name": "Microbial Fastq order",
- "customer": "cust002",
+ "customer": "cust000",
+ "delivery_type": "fastq",
+ "project_type": "microbial-fastq",
+ "user_id": 0,
"comment": "",
"samples": [
{
- "application": "WGSPCFC060",
+ "application": "MWRNXTR003",
"comment": "sample comment",
"container": "Tube",
"container_name": "prov1",
- "data_analysis": "microsalt",
+ "data_analysis": "raw-data",
"data_delivery": "fastq",
"elution_buffer": "Nuclease-free water",
"name": "prov1",
"priority": "priority",
- "volume": "1",
+ "volume": "100",
"well_position": ""
},
{
- "application": "WGSPCFC060",
+ "application": "MWRNXTR003",
"comment": "sample comment",
- "container": "Tube",
+ "container": "96 well plate",
"container_name": "prov2",
"data_analysis": "raw-data",
"data_delivery": "fastq",
"elution_buffer": "Nuclease-free water",
"name": "prov2",
"priority": "priority",
- "volume": "2",
- "well_position": ""
+ "volume": "20",
+ "well_position": "A:1"
}
]
}
\ No newline at end of file
diff --git a/tests/fixtures/cgweb_orders/microsalt.json b/tests/fixtures/cgweb_orders/microsalt.json
index 1273901f63..3fc22ea8f9 100644
--- a/tests/fixtures/cgweb_orders/microsalt.json
+++ b/tests/fixtures/cgweb_orders/microsalt.json
@@ -1,92 +1,309 @@
{
- "name": "Microbial samples",
- "customer": "cust002",
- "comment": "Order comment",
- "samples": [
- {
- "name": "all-fields",
- "application": "MWRNXTR003",
- "data_analysis": "microsalt",
- "data_delivery": "fastq",
- "volume": "1",
- "priority": "research",
- "require_qc_ok": true,
- "organism": "M.upium",
- "reference_genome": "NC_111",
- "elution_buffer": "Nuclease-free water",
- "extraction_method": "MagNaPure 96 (contact Clinical Genomics before submission)",
- "container": "96 well plate",
- "container_name": "name of plate",
- "well_position": "A:1",
- "comment": "plate comment"
- },
- {
- "name": "required-fields",
- "application": "MWRNXTR003",
- "data_analysis": "microsalt",
- "data_delivery": "fastq",
- "volume": "2",
- "priority": "standard",
- "require_qc_ok": true,
- "organism": "C. difficile",
- "reference_genome": "NC_222",
- "elution_buffer": "Nuclease-free water",
- "extraction_method": "MagNaPure 96 (contact Clinical Genomics before submission)",
- "container": "Tube",
- "container_name": "required-fields",
- "well_position": "",
- "comment": ""
- },
- {
- "name": "plate-fields",
- "application": "MWRNXTR003",
- "data_analysis": "microsalt",
- "data_delivery": "fastq",
- "volume": "3",
- "priority": "research",
- "require_qc_ok": true,
- "organism": "C. difficile",
- "reference_genome": "NC_333",
- "elution_buffer": "Nuclease-free water",
- "extraction_method": "MagNaPure 96 (contact Clinical Genomics before submission)",
- "container": "96 well plate",
- "container_name": "name of plate",
- "well_position": "A:2",
- "comment": ""
- },
- {
- "name": "other-species-fields",
- "application": "MWRNXTR003",
- "data_analysis": "microsalt",
- "data_delivery": "fastq",
- "volume": "4",
- "priority": "research",
- "require_qc_ok": true,
- "organism": "M.upium",
- "reference_genome": "NC_444",
- "elution_buffer": "Nuclease-free water",
- "extraction_method": "MagNaPure 96 (contact Clinical Genomics before submission)",
- "container": "Tube",
- "container_name": "other-species-fields",
- "well_position": "",
- "comment": ""
- },
- {
- "name": "optional-fields",
- "application": "MWRNXTR003",
- "data_analysis": "microsalt",
- "data_delivery": "fastq",
- "volume": "5",
- "priority": "research",
- "require_qc_ok": false,
- "organism": "C. difficile",
- "reference_genome": "NC_555",
- "elution_buffer": "Nuclease-free water",
- "extraction_method": "MagNaPure 96 (contact Clinical Genomics before submission)",
- "container": "Tube",
- "container_name": "optional-fields",
- "well_position": "",
- "comment": "optional comment"
- }
- ]
-}
+ "cases": [],
+ "comment": null,
+ "customer": "cust000",
+ "data_analysis": null,
+ "data_delivery": null,
+ "delivery_type": "fastq_qc-analysis",
+ "name": "1603.11.microbial",
+ "project_type": "microsalt",
+ "samples": [
+ {
+ "age_at_sampling": null,
+ "application": "MWRNXTR003",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "comments",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": "2",
+ "container": "96 well plate",
+ "container_name": "plate1",
+ "control": "positive",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "MIP DNA",
+ "data_delivery": "fastq qc + analysis",
+ "elution_buffer": "Other (specify in \"Comments\")",
+ "extraction_method": "MagNaPure 96 (contact Clinical Genomics before submission)",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "microbialsample1",
+ "organism": "C. jejuni",
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "research",
+ "quantity": "3",
+ "reagent_label": null,
+ "reference_genome": "NC_000001",
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": null,
+ "rml_plate_name": null,
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "20",
+ "well_position": "A:1",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "MWXNXTR003",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "96 well plate",
+ "container_name": "plate1",
+ "control": "negative",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "MIP DNA",
+ "data_delivery": "fastq qc + analysis",
+ "elution_buffer": "Nuclease-free water",
+ "extraction_method": "MagNaPure 96 (contact Clinical Genomics before submission)",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "microbialsample2",
+ "organism": "C. jejuni",
+ "organism_other": "",
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "standard",
+ "quantity": null,
+ "reagent_label": null,
+ "reference_genome": "NC_000002",
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": null,
+ "rml_plate_name": null,
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "21",
+ "well_position": "B:1",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "VWGNXTR001",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "Tube",
+ "container_name": "",
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "MIP DNA",
+ "data_delivery": "fastq qc + analysis",
+ "elution_buffer": "Tris-HCl",
+ "extraction_method": "EZ1",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "microbialsample3",
+ "organism": "C. difficile",
+ "organism_other": "",
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "priority",
+ "quantity": null,
+ "reagent_label": null,
+ "reference_genome": "NC_000003",
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": null,
+ "rml_plate_name": null,
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "22",
+ "well_position": "",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "VWGNXTR001",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "Tube",
+ "container_name": "",
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "MIP DNA",
+ "data_delivery": "fastq qc + analysis",
+ "elution_buffer": "Nuclease-free water",
+ "extraction_method": "QIAsymphony",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "microbialsample4",
+ "organism": "E. faecalis",
+ "organism_other": "",
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "express",
+ "quantity": null,
+ "reagent_label": null,
+ "reference_genome": "NC_000004",
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": null,
+ "rml_plate_name": null,
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "23",
+ "well_position": "",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "MWRNXTR003",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "Tube",
+ "container_name": "",
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "MIP DNA",
+ "data_delivery": "fastq qc + analysis",
+ "elution_buffer": "Tris-HCl",
+ "extraction_method": "Qiagen MagAttract",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "microbialsample5",
+ "organism": "E. faecium",
+ "organism_other": "",
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "research",
+ "quantity": null,
+ "reagent_label": null,
+ "reference_genome": "NC_000005",
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": null,
+ "rml_plate_name": null,
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "24",
+ "well_position": "",
+ "well_position_rml": null
+ }
+ ],
+ "ticket": null,
+ "user_id": 1
+}
\ No newline at end of file
diff --git a/tests/fixtures/cgweb_orders/mip.json b/tests/fixtures/cgweb_orders/mip.json
index 202c4337e0..03e063feaf 100644
--- a/tests/fixtures/cgweb_orders/mip.json
+++ b/tests/fixtures/cgweb_orders/mip.json
@@ -1,128 +1,180 @@
{
- "name": "#123456",
- "customer": "cust000",
- "comment": "",
- "samples": [
- {
- "age_at_sampling": "17.18192",
- "application": "WGSPCFC030",
- "cohorts":["Other"],
- "comment": "comment",
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "mip-dna",
- "data_delivery": "scout",
- "family_name": "family1",
- "father": "sample3",
- "mother": "sample2",
- "name": "sample1",
- "panels": [
- "IEM"
- ],
- "phenotype_groups":["Phenotype-group"],
- "phenotype_terms":["HP:0012747","HP:0025049"],
- "priority": "standard",
- "quantity": "220",
- "sex": "female",
- "source": "tissue (fresh frozen)",
- "status": "affected",
- "subject_id": "subject1",
- "synopsis":"As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
- "tumour": true,
- "volume": "1",
- "well_position": "A:1"
- },
- {
- "age_at_sampling": "2.0",
- "application": "WGSPCFC030",
- "cohorts": ["Other"],
- "comment": "this is a sample comment",
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "mip-dna",
- "data_delivery": "scout",
- "family_name": "family1",
- "name": "sample2",
- "panels": [
- "IEM"
- ],
- "synopsis":"As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
- "phenotype_groups": [
- ""
- ],
- "phenotype_terms": [
- ""
- ],
- "priority": "standard",
- "quantity": "220",
- "sex": "female",
- "source": "tissue (fresh frozen)",
- "status": "affected",
- "subject_id": "subject2",
- "tumour": false,
- "volume": "2",
- "well_position": "B:1"
- },
- {
- "age_at_sampling": "3.0",
- "application": "WGSPCFC030",
- "cohorts": ["Other"],
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "mip-dna",
- "data_delivery": "scout",
- "family_name": "family1",
- "name": "sample3",
- "panels": [
- "IEM"
- ],
- "phenotype_groups": [
- ""
- ],
- "phenotype_terms": [
- ""
- ],
- "priority": "standard",
- "quantity": "220",
- "sex": "male",
- "source": "tissue (fresh frozen)",
- "status": "affected",
- "subject_id": "subject3",
- "synopsis":"As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
- "tumour": false,
- "volume": "3",
- "well_position": "C:1"
- },
- {
- "age_at_sampling": "4.0",
- "application": "WGSPCFC030",
- "cohorts": ["Other"],
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "mip-dna",
- "data_delivery": "scout",
- "family_name": "family2",
- "name": "sample4",
- "panels": [
- "IEM"
- ],
- "phenotype_groups": [
- ""
- ],
- "phenotype_terms": [
- ""
- ],
- "priority": "standard",
- "quantity": "220",
- "sex": "female",
- "source": "tissue (fresh frozen)",
- "status": "affected",
- "subject_id": "subjectsample4",
- "synopsis": "As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
- "tumour": false,
- "volume": "4",
- "well_position": "D:1"
- }
- ]
-}
-
+ "cases": [
+ {
+ "cohorts": null,
+ "name": "MipCase1",
+ "panels": [
+ "AID"
+ ],
+ "priority": "standard",
+ "samples": [
+ {
+ "age_at_sampling": null,
+ "application": "WGSPCFC030",
+ "capture_kit": null,
+ "comment": "Test comment",
+ "concentration_ng_ul": null,
+ "container": "96 well plate",
+ "container_name": "MipPlate",
+ "control": null,
+ "data_analysis": null,
+ "data_delivery": null,
+ "elution_buffer": "Nuclease-free water",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "mother": null,
+ "name": "MipSample1",
+ "phenotype_groups": [
+ "PhGroup"
+ ],
+ "phenotype_terms": [
+ "PhTerm1",
+ "PhTerm2"
+ ],
+ "post_formalin_fixation_time": null,
+ "priority": null,
+ "quantity": null,
+ "reference_genome": null,
+ "require_qc_ok": false,
+ "sex": "male",
+ "source": "blood",
+ "source_comment": null,
+ "status": "affected",
+ "subject_id": "Subject1",
+ "tissue_block_size": null,
+ "tumour": false,
+ "tumour_purity": null,
+ "volume": 54,
+ "well_position": "A:1"
+ },
+ {
+ "age_at_sampling": null,
+ "application": "WGSPCFC030",
+ "capture_kit": null,
+ "comment": "Test comment",
+ "concentration_ng_ul": null,
+ "container": "96 well plate",
+ "container_name": "MipPlate",
+ "control": null,
+ "data_analysis": null,
+ "data_delivery": null,
+ "elution_buffer": "Nuclease-free water",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "mother": null,
+ "name": "MipSample2",
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "post_formalin_fixation_time": null,
+ "priority": null,
+ "quantity": null,
+ "reference_genome": null,
+ "require_qc_ok": false,
+ "sex": "female",
+ "source": "blood",
+ "source_comment": null,
+ "status": "affected",
+ "subject_id": "Subject2",
+ "tissue_block_size": null,
+ "tumour": false,
+ "tumour_purity": null,
+ "volume": 54,
+ "well_position": "B:1"
+ },
+ {
+ "age_at_sampling": null,
+ "application": "WGSPCFC030",
+ "capture_kit": null,
+ "comment": null,
+ "concentration_ng_ul": null,
+ "container": "96 well plate",
+ "container_name": "MipPlate",
+ "control": null,
+ "data_analysis": null,
+ "data_delivery": null,
+ "elution_buffer": "Nuclease-free water",
+ "family_name": null,
+ "father": "MipSample1",
+ "formalin_fixation_time": null,
+ "mother": "MipSample2",
+ "name": "MipSample3",
+ "phenotype_groups": ["Phenotype-group"],
+ "phenotype_terms": ["HP:0012747", "HP:0025049"],
+ "post_formalin_fixation_time": null,
+ "priority": null,
+ "quantity": null,
+ "reference_genome": null,
+ "require_qc_ok": false,
+ "sex": "female",
+ "source": "blood",
+ "source_comment": null,
+ "status": "affected",
+ "subject_id": "Subject3",
+ "tissue_block_size": null,
+ "tumour": false,
+ "tumour_purity": null,
+ "volume": 54,
+ "well_position": "C:1"
+ }
+ ],
+ "synopsis": "This is a long string to test the buffer length because surely this is the best way to do this and there are no better ways of doing this."
+ },
+ {
+ "cohorts": null,
+ "name": "MipCase2",
+ "panels": [
+ "Ataxi"
+ ],
+ "priority": "standard",
+ "samples": [
+ {
+ "age_at_sampling": null,
+ "application": "WGSWPFC030",
+ "capture_kit": null,
+ "comment": null,
+ "concentration_ng_ul": null,
+ "container": "96 well plate",
+ "container_name": "MipPlate",
+ "control": null,
+ "data_analysis": null,
+ "data_delivery": null,
+ "elution_buffer": "Nuclease-free water",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "mother": null,
+ "name": "MipSample4",
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "post_formalin_fixation_time": null,
+ "priority": null,
+ "quantity": null,
+ "reference_genome": null,
+ "require_qc_ok": false,
+ "sex": "male",
+ "source": "blood",
+ "source_comment": null,
+ "status": "affected",
+ "subject_id": "Subject4",
+ "tissue_block_size": null,
+ "tumour": false,
+ "tumour_purity": null,
+ "volume": 54,
+ "well_position": "D:1"
+ }
+ ],
+ "synopsis": null
+ }
+ ],
+ "comment": null,
+ "customer": "cust000",
+ "data_analysis": "mip-dna",
+ "data_delivery": null,
+ "delivery_type": "analysis-scout",
+ "name": "MipOrder",
+ "project_type": "mip-dna",
+ "ticket": null,
+ "user_id": 1
+}
\ No newline at end of file
diff --git a/tests/fixtures/cgweb_orders/mip_rna.json b/tests/fixtures/cgweb_orders/mip_rna.json
index 2b38b64498..b680270379 100644
--- a/tests/fixtures/cgweb_orders/mip_rna.json
+++ b/tests/fixtures/cgweb_orders/mip_rna.json
@@ -1,60 +1,95 @@
{
- "name": "#123456",
- "customer": "cust003",
- "comment": "",
- "samples": [
- {
- "application": "RNAPOAR025",
- "cohorts": [
- ""
- ],
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "mip-rna",
- "data_delivery": "scout",
- "family_name": "family1",
- "name": "sample1-rna-t1",
- "phenotype_groups": [
- ""
- ],
- "phenotype_terms": [
- ""
- ],
- "priority": "standard",
- "quantity": "220",
- "sex": "female",
- "source": "tissue (fresh frozen)",
- "synopsis": "",
- "subject_id": "subject-sample1-rna-t1",
- "volume": "1",
- "well_position": "A:1"
- },
- {
- "application": "RNAPOAR025",
- "cohorts": [
- ""
- ],
- "comment": "this is a sample comment",
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "mip-rna",
- "data_delivery": "scout",
- "family_name": "family1",
- "name": "sample1-rna-t2",
- "phenotype_groups": [
- ""
- ],
- "phenotype_terms": [
- ""
- ],
- "priority": "standard",
- "quantity": "220",
- "sex": "female",
- "source": "tissue (fresh frozen)",
- "synopsis": "",
- "subject_id": "subject-sample1-rna-t2",
- "volume": "2",
- "well_position": "B:1"
- }
- ]
-}
+ "cases": [
+ {
+ "cohorts": [
+ "Cohort1",
+ "Cohort2"
+ ],
+ "name": "MipRNACase",
+ "panels": null,
+ "priority": "research",
+ "samples": [
+ {
+ "age_at_sampling": "29",
+ "application": "RNAPOAR025",
+ "comment": "This is a sample comment",
+ "container": "96 well plate",
+ "container_name": "MipRNAContainer",
+ "control": "negative",
+ "elution_buffer": "Nuclease-free water",
+ "name": "MipRNASample1",
+ "phenotype_groups": [
+ "phengroup1",
+ "phengroup2"
+ ],
+ "phenotype_terms": [
+ "phenterm1",
+ "phenterm2"
+ ],
+ "processedAt": "2024-12-18T09:59:27.336Z",
+ "require_qc_ok": true,
+ "sex": "female",
+ "source": "buccal swab",
+ "subject_id": "miprnasubject1",
+ "volume": 54,
+ "well_position": "A:1"
+ },
+ {
+ "age_at_sampling": "43",
+ "application": "RNAPOAR025",
+ "comment": "This is another sample comment",
+ "container": "96 well plate",
+ "container_name": "MipRNAContainer",
+ "elution_buffer": "Tris-HCl",
+ "name": "MipRNASample2",
+ "phenotype_groups": [
+ "phengroup3"
+ ],
+ "phenotype_terms": [
+ "phenterm4"
+ ],
+ "processedAt": "2024-12-18T09:59:27.337Z",
+ "require_qc_ok": true,
+ "sex": "female",
+ "source": "blood",
+ "subject_id": "miprnasubject2",
+ "volume": 54,
+ "well_position": "B:1"
+ }
+ ],
+ "synopsis": "This is a synopsis"
+ },
+ {
+ "cohorts": null,
+ "name": "MipRNACase2",
+ "panels": null,
+ "priority": "research",
+ "samples": [
+ {
+ "age_at_sampling": "66",
+ "application": "RNAPOAR025",
+ "container": "96 well plate",
+ "container_name": "MipRNAContainer",
+ "name": "MipRNASample3",
+ "processedAt": "2024-12-18T09:59:27.337Z",
+ "require_qc_ok": true,
+ "sex": "female",
+ "source": "blood",
+ "subject_id": "miprnasubject3",
+ "volume": 54,
+ "well_position": "C:1"
+ }
+ ],
+ "synopsis": null
+ }
+ ],
+ "comment": "This is an order comment",
+ "customer": "cust000",
+ "data_delivery": "analysis-scout",
+ "dataAnalysis": "mip-rna",
+ "delivery_type": "analysis-scout",
+ "name": "MipRnaOrder",
+ "project_type": "mip-rna",
+ "ticket": null,
+ "user_id": 1
+}
\ No newline at end of file
diff --git a/tests/fixtures/cgweb_orders/pacbio.json b/tests/fixtures/cgweb_orders/pacbio.json
index 69ea4a7cab..300dc04b07 100644
--- a/tests/fixtures/cgweb_orders/pacbio.json
+++ b/tests/fixtures/cgweb_orders/pacbio.json
@@ -1,10 +1,13 @@
{
"name": "PacbioOrder",
"customer": "cust000",
+ "delivery_type": "bam",
+ "project_type": "pacbio-long-read",
+ "user_id": 0,
"comment": "",
"samples": [
{
- "application": "WGSPCFC060",
+ "application": "LWPBELB070",
"comment": "",
"container": "Tube",
"container_name": "prov1",
@@ -13,16 +16,17 @@
"elution_buffer": "Nuclease-free water",
"name": "prov1",
"priority": "priority",
- "sex": "female",
+ "sex": "male",
"source": "blood",
"tumour": false,
"volume": "25",
+ "require_qc_ok": false,
"well_position": "",
"buffer": "Nuclease-free water",
"subject_id": "subject2"
},
{
- "application": "WGSPCFC060",
+ "application": "LWPBELB070",
"comment": "",
"container": "Tube",
"container_name": "prov2",
@@ -35,9 +39,29 @@
"source": "cell line",
"tumour": true,
"volume": "35",
+ "require_qc_ok": false,
"well_position": "",
"buffer": "Nuclease-free water",
"subject_id": "subject1"
+ },
+ {
+ "application": "LWPBELB070",
+ "comment": "",
+ "container": "96 well plate",
+ "container_name": "plate1",
+ "data_analysis": "raw-data",
+ "data_delivery": "bam",
+ "elution_buffer": "Nuclease-free water",
+ "name": "prov3",
+ "priority": "priority",
+ "sex": "male",
+ "source": "blood",
+ "tumour": false,
+ "volume": "35",
+ "require_qc_ok": false,
+ "well_position": "A:1",
+ "buffer": "Nuclease-free water",
+ "subject_id": "subject3"
}
]
}
\ No newline at end of file
diff --git a/tests/fixtures/cgweb_orders/rml.json b/tests/fixtures/cgweb_orders/rml.json
index 41ae4b810f..d79932115d 100644
--- a/tests/fixtures/cgweb_orders/rml.json
+++ b/tests/fixtures/cgweb_orders/rml.json
@@ -1,77 +1,249 @@
{
- "name": "#123456",
- "customer": "cust000",
- "comment": "order comment",
- "samples": [
- {
- "application": "RMLP05R800",
- "comment": "test comment",
- "concentration": "5",
- "concentration_sample": "6",
- "control": "negative",
- "data_analysis": "raw-data",
- "data_delivery": "fastq",
- "index": "IDT DupSeq 10 bp Set B",
- "index_number": "1",
- "index_sequence": "A01 - D701-D501 (ATTACTCG-TATAGCCT)",
- "name": "sample1",
- "pool": "pool-1",
- "priority": "research",
- "volume": "30"
- },
- {
- "application": "RMLP05R800",
- "comment": "",
- "concentration": "5",
- "concentration_sample": "6",
- "control": "positive",
- "data_analysis": "raw-data",
- "data_delivery": "fastq",
- "index": "IDT DupSeq 10 bp Set B",
- "index_number": "2",
- "index_sequence": "B01 - D701-D502 (ATTACTCG-ATAGAGGC)",
- "name": "sample2",
- "pool": "pool-1",
- "priority": "research",
- "rml_plate_name": "",
- "volume": "30",
- "well_position_rml": ""
- },
- {
- "application": "RMLP05R800",
- "comment": "test comment",
- "concentration": "5",
- "concentration_sample": "6",
- "control": "",
- "data_analysis": "raw-data",
- "data_delivery": "fastq",
- "index": "IDT DupSeq 10 bp Set B",
- "index_number": "3",
- "index_sequence": "A01 - D701-D501 (ATTACTCG-TATAGCCT)",
- "name": "sample3",
- "pool": "pool-2",
- "priority": "research",
- "rml_plate_name": "plate1",
- "volume": "30",
- "well_position_rml": "A:1"
- },
- {
- "application": "RMLP05R800",
- "comment": "",
- "concentration": "5",
- "concentration_sample": "6",
- "control": "",
- "data_analysis": "raw-data",
- "data_delivery": "fastq",
- "index": "IDT DupSeq 10 bp Set B",
- "index_number": "4",
- "index_sequence": "B01 - D701-D502 (ATTACTCG-ATAGAGGC)",
- "name": "sample4",
- "pool": "pool-2",
- "priority": "research",
- "rml_plate_name": "plate1",
- "volume": "30",
- "well_position_rml": "A:1"
- }
- ]
-}
+ "cases": [],
+ "comment": null,
+ "customer": "cust000",
+ "data_delivery": null,
+ "delivery_type": "fastq",
+ "name": "1604.19.rml",
+ "project_type": "rml",
+ "samples": [
+ {
+ "age_at_sampling": null,
+ "application": "RMLP15R100",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "comment",
+ "concentration": "2",
+ "concentration_ng_ul": null,
+ "concentration_sample": "4",
+ "container": null,
+ "container_name": null,
+ "control": "positive",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "RAW-DATA",
+ "data_delivery": "fastq",
+ "elution_buffer": null,
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": "IDT DupSeq 10 bp Set B",
+ "index_number": "3",
+ "index_sequence": "",
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "rmlsample1",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": "pool1",
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "research",
+ "quantity": null,
+ "reagent_label": "C01 IDT_10nt_568 (TGTGAGCGAA-AACTCCGATC)",
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": "plate1",
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "20",
+ "well_position": null,
+ "well_position_rml": "A:1"
+ },
+ {
+ "age_at_sampling": null,
+ "application": "RMLP15R200",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "",
+ "concentration": "2",
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": null,
+ "container_name": null,
+ "control": "negative",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "RAW-DATA",
+ "data_delivery": "fastq",
+ "elution_buffer": null,
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": "TruSeq DNA HT Dual-index (D7-D5)",
+ "index_number": "3",
+ "index_sequence": "C01 - D701-D503 (ATTACTCG-CCTATCCT)",
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "rmlsample2",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": "pool2",
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "clinical_trials",
+ "quantity": null,
+ "reagent_label": "C01 - D701-D503 (ATTACTCG-CCTATCCT)",
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": "rmlplate2",
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "21",
+ "well_position": null,
+ "well_position_rml": "A:1"
+ },
+ {
+ "age_at_sampling": null,
+ "application": "RMLP15R400",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "",
+ "concentration": "2",
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": null,
+ "container_name": null,
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "RAW-DATA",
+ "data_delivery": "fastq",
+ "elution_buffer": null,
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": "NEXTflex® v2 UDI Barcodes 1 - 96",
+ "index_number": "3",
+ "index_sequence": "UDI 3 (CGCTGCTC-GGCAGATC)",
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "rmlsample3",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": "pool3",
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "standard",
+ "quantity": null,
+ "reagent_label": "UDI3 (CGCTGCTC-GGCAGATC)",
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": "rmlplate3",
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "22",
+ "well_position": null,
+ "well_position_rml": "A:1"
+ },
+ {
+ "age_at_sampling": null,
+ "application": "RMLP15R500",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "",
+ "concentration": "2",
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": null,
+ "container_name": null,
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "RAW-DATA",
+ "data_delivery": "fastq",
+ "elution_buffer": null,
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": "NEXTflex® v2 UDI Barcodes 1 - 96",
+ "index_number": "3",
+ "index_sequence": "UDI 3 (CGCTGCTC-GGCAGATC)",
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "rmlsample4",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": "pool4",
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "priority",
+ "quantity": null,
+ "reagent_label": "UDI 3 (CGCTGCTC-GGCAGATC)",
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": "rmplate4",
+ "selection_criteria": null,
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "23",
+ "well_position": null,
+ "well_position_rml": "A:1"
+ }
+ ],
+ "ticket": null,
+ "user_id": 1
+}
\ No newline at end of file
diff --git a/tests/fixtures/cgweb_orders/rnafusion.json b/tests/fixtures/cgweb_orders/rnafusion.json
index 203329ffc7..d9fcfaf8e4 100644
--- a/tests/fixtures/cgweb_orders/rnafusion.json
+++ b/tests/fixtures/cgweb_orders/rnafusion.json
@@ -1,60 +1,70 @@
{
- "name": "#123456",
- "customer": "cust003",
- "comment": "",
- "samples": [
- {
- "application": "RNAPOAR025",
- "cohorts": [
- ""
- ],
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "rnafusion",
- "data_delivery": "scout",
- "family_name": "family1",
- "name": "sample1-rna-t1",
- "phenotype_groups": [
- ""
- ],
- "phenotype_terms": [
- ""
- ],
- "priority": "standard",
- "quantity": "220",
- "sex": "female",
- "source": "tissue (fresh frozen)",
- "synopsis": "",
- "subject_id": "subject-sample1-rna-t1",
- "volume": "1",
- "well_position": "A:1"
- },
- {
- "application": "RNAPOAR025",
- "cohorts": [
- ""
- ],
- "comment": "this is a sample comment",
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "rnafusion",
- "data_delivery": "scout",
- "family_name": "family2",
- "name": "sample1-rna-t2",
- "phenotype_groups": [
- ""
- ],
- "phenotype_terms": [
- ""
- ],
- "priority": "standard",
- "quantity": "220",
- "sex": "female",
- "source": "tissue (fresh frozen)",
- "synopsis": "",
- "subject_id": "subject-sample1-rna-t2",
- "volume": "2",
- "well_position": "B:1"
- }
- ]
+ "name": "RNAfusion-order",
+ "customer": "cust000",
+ "delivery_type": "fastq-analysis",
+ "project_type": "rnafusion",
+ "comment": "",
+ "cases": [
+ {
+ "cohorts": null,
+ "name": "RnaFusionCase",
+ "panels": null,
+ "priority": "standard",
+ "samples": [
+ {
+ "application": "RNAPOAR025",
+ "cohorts": [""],
+ "container": "96 well plate",
+ "container_name": "CMMS",
+ "data_analysis": null,
+ "data_delivery": null,
+ "family_name": "family1",
+ "name": "sample1-rna-t1",
+ "phenotype_groups": [""],
+ "phenotype_terms": [""],
+ "priority": "standard",
+ "quantity": "220",
+ "require_qc_ok": false,
+ "sex": "female",
+ "source": "tissue (fresh frozen)",
+ "synopsis": "",
+ "subject_id": "subject-sample1-rna-t1",
+ "volume": "120",
+ "well_position": "A:1"
+ }
+ ],
+ "synopsis": "A synopsis"
+ },
+ {
+ "cohorts": null,
+ "name": "RnaFusionCase2",
+ "panels": null,
+ "priority": "standard",
+ "samples": [
+ {
+ "application": "RNAPOAR025",
+ "cohorts": [""],
+ "comment": "this is a sample comment",
+ "container": "96 well plate",
+ "container_name": "CMMS",
+ "data_analysis": null,
+ "data_delivery": null,
+ "family_name": "family2",
+ "name": "sample1-rna-t2",
+ "phenotype_groups": [""],
+ "phenotype_terms": [""],
+ "priority": "standard",
+ "quantity": "220",
+ "require_qc_ok": false,
+ "sex": "female",
+ "source": "tissue (fresh frozen)",
+ "synopsis": "",
+ "subject_id": "subject-sample1-rna-t2",
+ "volume": "20",
+ "well_position": "B:1"
+ }
+ ],
+ "synopsis": "A synopsis"
+ }
+ ]
}
diff --git a/tests/fixtures/cgweb_orders/sarscov2.json b/tests/fixtures/cgweb_orders/sarscov2.json
index d8af7dbaa2..6bbbdd0cf0 100644
--- a/tests/fixtures/cgweb_orders/sarscov2.json
+++ b/tests/fixtures/cgweb_orders/sarscov2.json
@@ -1,161 +1,368 @@
{
- "name": "Sars-CoV-2 samples",
- "customer": "cust002",
- "comment": "Order comment",
- "samples": [
- {
- "application": "VWGDPTR001",
- "collection_date": "2021-05-05",
- "comment": "plate comment",
- "container": "96 well plate",
- "container_name": "name of plate",
- "data_analysis": "mutant",
- "data_delivery": "fastq",
- "elution_buffer": "Nuclease-free water",
- "extraction_method": "MagNaPure 96",
- "lab_code": "SE110 Växjö",
- "name": "all-fields",
- "organism": "SARS CoV-2",
- "original_lab": "Karolinska University Hospital Solna",
- "original_lab_address": "171 76 Stockholm",
- "pre_processing_method": "COVIDSeq",
- "primer": "Illumina Artic V3",
- "priority": "research",
- "reference_genome": "NC_111",
- "region": "Stockholm",
- "region_code": "01",
- "require_qc_ok": true,
- "selection_criteria": "1. Allmän övervakning",
- "volume": "1",
- "well_position": "A:1"
- },
- {
- "application": "VWGDPTR001",
- "collection_date": "2021-05-05",
- "comment": "",
- "container": "Tube",
- "container_name": "required-fields",
- "data_analysis": "mutant",
- "data_delivery": "fastq",
- "elution_buffer": "Nuclease-free water",
- "extraction_method": "MagNaPure 96",
- "lab_code": "SE110 Växjö",
- "name": "required-fields",
- "organism": "SARS CoV-2",
- "original_lab": "Karolinska University Hospital Solna",
- "original_lab_address": "171 76 Stockholm",
- "pre_processing_method": "COVIDSeq",
- "primer": "Illumina Artic V3",
- "priority": "standard",
- "reference_genome": "NC_222",
- "region": "Stockholm",
- "region_code": "01",
- "require_qc_ok": true,
- "selection_criteria": "1. Allmän övervakning",
- "volume": "2",
- "well_position": ""
- },
- {
- "application": "VWGDPTR001",
- "collection_date": "2021-05-05",
- "comment": "",
- "container": "96 well plate",
- "container_name": "name of plate",
- "data_analysis": "mutant",
- "data_delivery": "fastq",
- "elution_buffer": "Nuclease-free water",
- "extraction_method": "MagNaPure 96",
- "lab_code": "SE110 Växjö",
- "name": "plate-fields",
- "organism": "SARS CoV-2",
- "original_lab": "Karolinska University Hospital Solna",
- "original_lab_address": "171 76 Stockholm",
- "pre_processing_method": "COVIDSeq",
- "primer": "Nanopore Midnight V1",
- "priority": "research",
- "reference_genome": "NC_333",
- "region": "Stockholm",
- "region_code": "01",
- "require_qc_ok": true,
- "selection_criteria": "1. Allmän övervakning",
- "volume": "3",
- "well_position": "A:2"
- },
- {
- "application": "VWGDPTR001",
- "collection_date": "2021-05-05",
- "comment": "",
- "container": "Tube",
- "container_name": "other-species-fields",
- "data_analysis": "mutant",
- "data_delivery": "fastq",
- "elution_buffer": "Nuclease-free water",
- "extraction_method": "MagNaPure 96",
- "lab_code": "SE110 Växjö",
- "name": "other-species-fields",
- "organism": "SARS CoV-2",
- "original_lab": "Karolinska University Hospital Solna",
- "original_lab_address": "171 76 Stockholm",
- "pre_processing_method": "COVIDSeq",
- "primer": "Nanopore Midnight V1",
- "priority": "research",
- "reference_genome": "NC_444",
- "region": "Stockholm",
- "region_code": "01",
- "require_qc_ok": true,
- "selection_criteria": "1. Allmän övervakning",
- "volume": "4",
- "well_position": ""
- },
- {
- "application": "VWGDPTR001",
- "collection_date": "2021-05-05",
- "comment": "optional comment",
- "container": "Tube",
- "container_name": "optional-fields",
- "data_analysis": "mutant",
- "data_delivery": "fastq",
- "elution_buffer": "Nuclease-free water",
- "extraction_method": "MagNaPure 96",
- "lab_code": "SE110 Växjö",
- "name": "optional-fields",
- "organism": "SARS CoV-2",
- "original_lab": "Karolinska University Hospital Solna",
- "original_lab_address": "171 76 Stockholm",
- "pre_processing_method": "COVIDSeq",
- "primer": "Nanopore Midnight V1",
- "priority": "research",
- "reference_genome": "NC_555",
- "region": "Stockholm",
- "region_code": "01",
- "require_qc_ok": false,
- "selection_criteria": "1. Allmän övervakning",
- "volume": "5",
- "well_position": ""
- },
- {
- "application": "VWGDPTR001",
- "collection_date": "2021-05-05",
- "comment": "optional comment",
- "container": "Tube",
- "container_name": "optional-fields",
- "data_analysis": "mutant",
- "data_delivery": "fastq",
- "elution_buffer": "Nuclease-free water",
- "extraction_method": "MagNaPure 96",
- "lab_code": "SE110 Växjö",
- "name": "missing-region-code-and-original-lab-address",
- "organism": "SARS CoV-2",
- "original_lab": "Karolinska University Hospital Solna",
- "pre_processing_method": "COVIDSeq",
- "primer": "Nanopore Midnight V1",
- "priority": "research",
- "reference_genome": "NC_555",
- "region": "Stockholm",
- "require_qc_ok": false,
- "selection_criteria": "1. Allmän övervakning",
- "volume": "5",
- "well_position": ""
- }
- ]
-}
+ "cases": [],
+ "comment": null,
+ "customer": "cust000",
+ "data_analysis": null,
+ "data_delivery": null,
+ "delivery_type": "fastq-analysis",
+ "name": "2184.9.sarscov2",
+ "project_type": "sars-cov-2",
+ "samples": [
+ {
+ "age_at_sampling": null,
+ "application": "VWGDPTR001",
+ "capture_kit": null,
+ "collection_date": "2021-05-05",
+ "comment": "sample comment",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": "2",
+ "container": "96 well plate",
+ "container_name": "plate1",
+ "control": "positive",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "Mutant",
+ "data_delivery": "fastq",
+ "elution_buffer": "Nuclease-free water",
+ "extraction_method": "MagNaPure 96",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "control-positive",
+ "organism": "SARS-CoV-2",
+ "organism_other": "",
+ "original_lab": "Karolinska University Hospital Solna",
+ "original_lab_address": "",
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": "COVIDSeq",
+ "primer": "Illumina Artic V3",
+ "priority": "research",
+ "quantity": "3",
+ "reagent_label": null,
+ "reference_genome": "NC_111",
+ "region": "Stockholm",
+ "region_code": "",
+ "require_qc_ok": null,
+ "rml_plate_name": null,
+ "selection_criteria": "Allmän övervakning",
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "20",
+ "well_position": "A:1",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "VWGDPTR001",
+ "capture_kit": null,
+ "collection_date": "2021-05-06",
+ "comment": "",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "Tube",
+ "container_name": "",
+ "control": "negative",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "Mutant",
+ "data_delivery": "fastq",
+ "elution_buffer": "Other (specify in \"Comments\")",
+ "extraction_method": "EZ1",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "control-negative",
+ "organism": "SARS-CoV-2",
+ "organism_other": "",
+ "original_lab": "Synlab Medilab",
+ "original_lab_address": "183 53 Täby",
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": "Qiagen SARS-CoV-2 Primer Panel",
+ "primer": "Illumina Artic V3",
+ "priority": "research",
+ "quantity": null,
+ "reagent_label": null,
+ "reference_genome": "NC_000002",
+ "region": "Uppsala",
+ "region_code": "03",
+ "require_qc_ok": null,
+ "rml_plate_name": null,
+ "selection_criteria": "Allmän övervakning öppenvård",
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "21",
+ "well_position": "",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "VWGDPTR001",
+ "capture_kit": null,
+ "collection_date": "2021-05-07",
+ "comment": "",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "Tube",
+ "container_name": "",
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "Mutant",
+ "data_delivery": "fastq",
+ "elution_buffer": "Tris-HCl",
+ "extraction_method": "QIAsymphony",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "sarscov2sample3",
+ "organism": "SARS-CoV-2",
+ "organism_other": "",
+ "original_lab": "A05 Diagnostics",
+ "original_lab_address": "171 65 Solna",
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": "COVIDSeq",
+ "primer": "Illumina Artic V3",
+ "priority": "research",
+ "quantity": null,
+ "reagent_label": null,
+ "reference_genome": "NC_000003",
+ "region": "Sörmland",
+ "region_code": "04",
+ "require_qc_ok": null,
+ "rml_plate_name": null,
+ "selection_criteria": "Allmän övervakning slutenvård",
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "22",
+ "well_position": "",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "VWGDPTR001",
+ "capture_kit": null,
+ "collection_date": "2021-05-08",
+ "comment": "",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "Tube",
+ "container_name": "",
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "Mutant",
+ "data_delivery": "fastq",
+ "elution_buffer": "Tris-HCl",
+ "extraction_method": "Qiagen MagAttract",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "sarscov2sample4",
+ "organism": "SARS-CoV-2",
+ "organism_other": "",
+ "original_lab": "Karolinska University Hospital Solna",
+ "original_lab_address": "171 76 Stockholm",
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": "COVIDSeq",
+ "primer": "Illumina Artic V3",
+ "priority": "research",
+ "quantity": null,
+ "reagent_label": null,
+ "reference_genome": "NC_000004",
+ "region": "Östergötland",
+ "region_code": "05",
+ "require_qc_ok": null,
+ "rml_plate_name": null,
+ "selection_criteria": "Utlandsvistelse",
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "23",
+ "well_position": "",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "VWGDPTR001",
+ "capture_kit": null,
+ "collection_date": "2021-05-09",
+ "comment": "",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "Tube",
+ "container_name": "",
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "Mutant",
+ "data_delivery": "fastq",
+ "elution_buffer": "Tris-HCl",
+ "extraction_method": "Other (specify in \"Comments\")",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "sarscov2sample5",
+ "organism": "SARS-CoV-2",
+ "organism_other": "",
+ "original_lab": "Karolinska University Hospital Huddinge",
+ "original_lab_address": "141 86 Stockholm",
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": "COVIDSeq",
+ "primer": "Illumina Artic V3",
+ "priority": "research",
+ "quantity": null,
+ "reagent_label": null,
+ "reference_genome": "NC_000005",
+ "region": "Jönköpings län",
+ "region_code": "06",
+ "require_qc_ok": null,
+ "rml_plate_name": null,
+ "selection_criteria": "Riktad insamling",
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "24",
+ "well_position": "",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "VWGDPTR001",
+ "capture_kit": null,
+ "collection_date": "2021-05-10",
+ "comment": "",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "Tube",
+ "container_name": "",
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "Mutant",
+ "data_delivery": "fastq",
+ "elution_buffer": "Tris-HCl",
+ "extraction_method": "MagNaPure 96",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "sarscov2sample6",
+ "organism": "other",
+ "organism_other": "unknown",
+ "original_lab": "LaboratorieMedicinskt Centrum Gotland",
+ "original_lab_address": "621 84 Visby",
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": "COVIDSeq",
+ "primer": "Illumina Artic V3",
+ "priority": "research",
+ "quantity": null,
+ "reagent_label": null,
+ "reference_genome": "NC_000006",
+ "region": "Kronoberg",
+ "region_code": "07",
+ "require_qc_ok": null,
+ "rml_plate_name": null,
+ "selection_criteria": "Utbrott",
+ "sex": null,
+ "source": null,
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": "25",
+ "well_position": "",
+ "well_position_rml": null
+ }
+ ],
+ "ticket": null,
+ "user_id": 1
+}
\ No newline at end of file
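
The reworked sarscov2 fixture, like the other reworked fixtures above, replaces the flat legacy payload with the full order shape: top-level `project_type`, `delivery_type`, `user_id`, and every optional sample field written out explicitly as `null`. A minimal sketch of a smoke check for that contract; the path and key list are assumptions read off this diff, not a test the repository ships:

```python
import json
from pathlib import Path

# Assumed fixture location, mirroring the paths in this diff.
FIXTURE = Path("tests/fixtures/cgweb_orders/sarscov2.json")

# Top-level keys the reworked sample-based fixtures appear to carry.
EXPECTED_TOP_LEVEL_KEYS = {
    "cases", "customer", "delivery_type", "name",
    "project_type", "samples", "ticket", "user_id",
}

def test_sarscov2_fixture_shape() -> None:
    order = json.loads(FIXTURE.read_text())
    # The keys must be present even when the value is null.
    assert EXPECTED_TOP_LEVEL_KEYS <= order.keys()
    # Sample-based order types keep the cases list empty.
    assert order["cases"] == []
```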
diff --git a/tests/fixtures/cgweb_orders/taxprofiler.json b/tests/fixtures/cgweb_orders/taxprofiler.json
new file mode 100644
index 0000000000..59d56f6878
--- /dev/null
+++ b/tests/fixtures/cgweb_orders/taxprofiler.json
@@ -0,0 +1,191 @@
+{
+ "cases": [],
+ "comment": null,
+ "customer": "cust000",
+ "data_analysis": null,
+ "data_delivery": null,
+ "delivery_type": "fastq-analysis",
+ "name": "taxprofiler-order",
+ "project_type": "taxprofiler",
+ "samples": [
+ {
+ "age_at_sampling": null,
+ "application": "METWPFR030",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "comments",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "96 well plate",
+ "container_name": "plate1",
+ "control": "positive",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "TAXPROFILER",
+ "data_delivery": "fastq qc + analysis",
+ "elution_buffer": "Tris-HCl",
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "taxprofilersample1",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "research",
+ "quantity": "3",
+ "reagent_label": null,
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": null,
+ "selection_criteria": null,
+ "sex": null,
+ "source": "blood",
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": 20,
+ "well_position": "A:1",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "METWPFR030",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "comments",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "96 well plate",
+ "container_name": "plate1",
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "TAXPROFILER",
+ "data_delivery": "fastq qc + analysis",
+ "elution_buffer": "Tris-HCl",
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "taxprofilersample2",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "research",
+ "quantity": "6",
+ "reagent_label": null,
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": null,
+ "selection_criteria": null,
+ "sex": null,
+ "source": "blood",
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": 25,
+ "well_position": "B:2",
+ "well_position_rml": null
+ },
+ {
+ "age_at_sampling": null,
+ "application": "METWPFR030",
+ "capture_kit": null,
+ "collection_date": null,
+ "comment": "comments",
+ "concentration": null,
+ "concentration_ng_ul": null,
+ "concentration_sample": null,
+ "container": "96 well plate",
+ "container_name": "plate1",
+ "control": "",
+ "custom_index": null,
+ "customer": "cust000",
+ "data_analysis": "TAXPROFILER",
+ "data_delivery": "fastq qc + analysis",
+ "elution_buffer": "Tris-HCl",
+ "extraction_method": null,
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": null,
+ "index": null,
+ "index_number": null,
+ "index_sequence": null,
+ "internal_id": null,
+ "lab_code": null,
+ "mother": null,
+ "name": "taxprofilersample3",
+ "organism": null,
+ "organism_other": null,
+ "original_lab": null,
+ "original_lab_address": null,
+ "phenotype_groups": null,
+ "phenotype_terms": null,
+ "pool": null,
+ "post_formalin_fixation_time": null,
+ "pre_processing_method": null,
+ "primer": null,
+ "priority": "research",
+ "quantity": "5",
+ "reagent_label": null,
+ "reference_genome": null,
+ "region": null,
+ "region_code": null,
+ "require_qc_ok": false,
+ "rml_plate_name": null,
+ "selection_criteria": null,
+ "sex": null,
+ "source": "blood",
+ "status": null,
+ "subject_id": null,
+ "tissue_block_size": null,
+ "tumour": null,
+ "tumour_purity": null,
+ "verified_organism": null,
+ "volume": 22,
+ "well_position": "C:3",
+ "well_position_rml": null
+ }
+ ],
+ "ticket": null,
+ "user_id": 1
+}
\ No newline at end of file
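
The new taxprofiler fixture spells out the complete sample field set on every sample, with `null` for anything unused. If one wanted to guard that convention, a small helper (hypothetical, not part of this diff) could assert that all samples in a fixture share an identical key set:

```python
import json
from pathlib import Path

def assert_uniform_sample_keys(fixture: Path) -> None:
    """Every sample dict should spell out the same fields, nulls included."""
    samples = json.loads(fixture.read_text())["samples"]
    key_sets = {frozenset(sample) for sample in samples}
    assert len(key_sets) == 1, "samples differ in which fields they spell out"

# Example usage (path assumed from this diff):
# assert_uniform_sample_keys(Path("tests/fixtures/cgweb_orders/taxprofiler.json"))
```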
diff --git a/tests/fixtures/cgweb_orders/tomte.json b/tests/fixtures/cgweb_orders/tomte.json
index 4753dc93df..c20a313bca 100644
--- a/tests/fixtures/cgweb_orders/tomte.json
+++ b/tests/fixtures/cgweb_orders/tomte.json
@@ -1,131 +1,150 @@
{
"name": "#123456",
- "customer": "cust003",
+ "customer": "cust000",
"comment": "",
- "samples": [
+ "delivery_type": "fastq-analysis",
+ "project_type": "tomte",
+ "cases": [
{
- "age_at_sampling": "17.18192",
- "application": "RNAPOAR025",
- "cohorts":["Other"],
- "comment": "comment",
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "tomte",
- "data_delivery": "scout",
- "family_name": "family1",
- "father": "sample3",
- "mother": "sample2",
- "name": "sample1",
- "panels": [
- "IEM"
- ],
- "phenotype_groups":["Phenotype-group"],
- "phenotype_terms":["HP:0012747","HP:0025049"],
+ "cohorts": null,
+ "name": "TomteCase",
+ "panels": ["OMIM-AUTO"],
"priority": "standard",
- "quantity": "220",
- "reference_genome": "hg19",
- "sex": "female",
- "source": "fibroblast",
- "status": "affected",
- "subject_id": "subject1",
- "synopsis":"As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
- "tumour": true,
- "volume": "1",
- "well_position": "A:1"
- },
- {
- "age_at_sampling": "2.0",
- "application": "RNAPOAR025",
- "cohorts": ["Other"],
- "comment": "this is a sample comment",
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "tomte",
- "data_delivery": "scout",
- "family_name": "family1",
- "name": "sample2",
- "panels": [
- "IEM"
- ],
- "synopsis":"As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
- "phenotype_groups": [
- ""
- ],
- "phenotype_terms": [
- ""
- ],
- "priority": "standard",
- "quantity": "220",
- "reference_genome": "hg19",
- "sex": "female",
- "source": "fibroblast",
- "status": "affected",
- "subject_id": "subject2",
- "tumour": false,
- "volume": "2",
- "well_position": "B:1"
- },
- {
- "age_at_sampling": "3.0",
- "application": "RNAPOAR025",
- "cohorts": ["Other"],
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "tomte",
- "data_delivery": "scout",
- "family_name": "family1",
- "name": "sample3",
- "panels": [
- "IEM"
- ],
- "phenotype_groups": [
- ""
- ],
- "phenotype_terms": [
- ""
- ],
- "priority": "standard",
- "quantity": "220",
- "reference_genome": "hg19",
- "sex": "male",
- "source": "tissue (fresh frozen)",
- "status": "affected",
- "subject_id": "subject3",
- "synopsis":"As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
- "tumour": false,
- "volume": "3",
- "well_position": "C:1"
- },
- {
- "age_at_sampling": "4.0",
- "application": "RNAPOAR025",
- "cohorts": ["Other"],
- "container": "96 well plate",
- "container_name": "CMMS",
- "data_analysis": "tomte",
- "data_delivery": "scout",
- "family_name": "family2",
- "name": "sample4",
- "panels": [
- "IEM"
- ],
- "phenotype_groups": [
- ""
- ],
- "phenotype_terms": [
- ""
- ],
- "priority": "standard",
- "quantity": "220",
- "reference_genome": "hg19",
- "sex": "female",
- "source": "tissue (fresh frozen)",
- "status": "affected",
- "subject_id": "subjectsample4",
- "synopsis": "As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
- "tumour": false,
- "volume": "4",
- "well_position": "D:1"
+ "samples": [
+ {
+ "age_at_sampling": "17.18192",
+ "application": "RNAPOAR025",
+ "cohorts": [
+ "Other"
+ ],
+ "comment": "comment",
+ "container": "96 well plate",
+ "container_name": "CMMS",
+ "data_analysis": "tomte",
+ "data_delivery": "scout",
+ "family_name": "family1",
+ "father": "sample3",
+ "mother": "sample2",
+ "name": "sample1",
+ "panels": ["IEM"],
+ "phenotype_groups": [
+ "Phenotype-group"
+ ],
+ "phenotype_terms": [
+ "HP:0012747",
+ "HP:0025049"
+ ],
+ "priority": "standard",
+ "quantity": "220",
+ "reference_genome": "hg19",
+ "require_qc_ok": false,
+ "sex": "female",
+ "source": "fibroblast",
+ "status": "affected",
+ "subject_id": "subject1",
+ "synopsis": "As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
+ "tumour": true,
+ "volume": "110",
+ "well_position": "A:1"
+ },
+ {
+ "age_at_sampling": "2.0",
+ "application": "RNAPOAR025",
+ "cohorts": [
+ "Other"
+ ],
+ "comment": "this is a sample comment",
+ "container": "96 well plate",
+ "container_name": "CMMS",
+ "data_analysis": "tomte",
+ "data_delivery": "scout",
+ "family_name": "family1",
+ "name": "sample2",
+ "panels": ["IEM"],
+ "synopsis": "As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
+ "phenotype_groups": [
+ ""
+ ],
+ "phenotype_terms": [
+ ""
+ ],
+ "priority": "standard",
+ "quantity": "220",
+ "reference_genome": "hg19",
+ "require_qc_ok": false,
+ "sex": "female",
+ "source": "fibroblast",
+ "status": "affected",
+ "subject_id": "subject2",
+ "tumour": false,
+ "volume": "25",
+ "well_position": "B:1"
+ },
+ {
+ "age_at_sampling": "3.0",
+ "application": "RNAPOAR025",
+ "cohorts": [
+ "Other"
+ ],
+ "container": "96 well plate",
+ "container_name": "CMMS",
+ "data_analysis": "tomte",
+ "data_delivery": "scout",
+ "family_name": "family1",
+ "name": "sample3",
+ "panels": ["OMIM-AUTO"],
+ "phenotype_groups": [
+ ""
+ ],
+ "phenotype_terms": [
+ ""
+ ],
+ "priority": "standard",
+ "quantity": "220",
+ "reference_genome": "hg19",
+ "require_qc_ok": false,
+ "sex": "male",
+ "source": "tissue (fresh frozen)",
+ "status": "affected",
+ "subject_id": "subject3",
+ "synopsis": "As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
+ "tumour": false,
+ "volume": "30",
+ "well_position": "C:1"
+ },
+ {
+ "age_at_sampling": "4.0",
+ "application": "RNAPOAR025",
+ "cohorts": [
+ "Other"
+ ],
+ "container": "96 well plate",
+ "container_name": "CMMS",
+ "data_analysis": "tomte",
+ "data_delivery": "scout",
+ "family_name": "family2",
+ "name": "sample4",
+ "panels": ["OMIM-AUTO"],
+ "phenotype_groups": [
+ ""
+ ],
+ "phenotype_terms": [
+ ""
+ ],
+ "priority": "standard",
+ "quantity": "220",
+ "reference_genome": "hg19",
+ "require_qc_ok": false,
+ "sex": "female",
+ "source": "tissue (fresh frozen)",
+ "status": "affected",
+ "subject_id": "subjectsample4",
+ "synopsis": "As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for.",
+ "tumour": false,
+ "volume": "45",
+ "well_position": "D:1"
+ }
+ ]
}
]
}
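
The tomte fixture now nests its samples under `cases`, grouped by what used to be each sample's `family_name`. The new fixture was clearly rewritten by hand (the case is named `TomteCase`, not `family1`), but the regrouping itself can be sketched; the grouping key and the case-level defaults below are assumptions for illustration, not the repository's migration code:

```python
from collections import defaultdict
from typing import Any

def group_samples_into_cases(samples: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Regroup a flat legacy sample list into the nested cases structure."""
    by_family: dict[str, list[dict[str, Any]]] = defaultdict(list)
    for sample in samples:
        by_family[sample["family_name"]].append(sample)
    # Case-level priority and panels are taken from the first sample per family.
    return [
        {
            "name": family,
            "cohorts": None,
            "panels": members[0].get("panels"),
            "priority": members[0].get("priority", "standard"),
            "samples": members,
        }
        for family, members in by_family.items()
    ]
```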
diff --git a/tests/fixtures/invalid_cgweb_orders/balsamic_FAIL.json b/tests/fixtures/invalid_cgweb_orders/balsamic_FAIL.json
new file mode 100644
index 0000000000..f75ae2ddcd
--- /dev/null
+++ b/tests/fixtures/invalid_cgweb_orders/balsamic_FAIL.json
@@ -0,0 +1,61 @@
+{
+ "cases": [
+ {
+ "cohorts": null,
+ "name": "B",
+ "panels": null,
+ "priority": "FAIL",
+ "samples": [
+ {
+ "age_at_sampling": "17.2",
+ "application": "PANKTTR100",
+ "capture_kit": "GMCKsolid",
+ "comment": "This is a sample comment",
+ "concentration_ng_ul": null,
+ "container": "96 well plate",
+ "container_name": "BalsamicPlate",
+ "control": "FAIL",
+ "data_analysis": null,
+ "data_delivery": null,
+ "elution_buffer": "Tris-HCl",
+ "family_name": null,
+ "father": null,
+ "formalin_fixation_time": "15",
+ "mother": null,
+ "name": "BalsamicSample",
+ "phenotype_groups": [
+ "PhGroup"
+ ],
+ "phenotype_terms": [
+ "PhTerm"
+ ],
+ "post_formalin_fixation_time": "3",
+ "priority": null,
+ "quantity": null,
+ "reference_genome": null,
+ "require_qc_ok": false,
+ "sex": "male",
+ "source": "cytology (FFPE)",
+ "source_comment": null,
+ "status": null,
+ "subject_id": "Subject1",
+ "tissue_block_size": "large",
+ "tumour": true,
+ "tumour_purity": "13",
+ "volume": 42,
+ "well_position": "A:1"
+ }
+ ],
+ "synopsis": "A synopsis"
+ }
+ ],
+ "comment": null,
+ "customer": "cust000",
+ "data_analysis": "balsamic",
+ "data_delivery": null,
+ "delivery_type": "analysis-scout",
+ "name": "BalsamicOrder",
+ "project_type": "balsamic",
+ "ticket": null,
+ "user_id": 1
+}
\ No newline at end of file
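
This fixture lives under `invalid_cgweb_orders` and plants the sentinel value `"FAIL"` in enum-constrained fields (`priority`, `control`), so order validation must reject it. A self-contained pydantic v2 sketch of that rejection mechanism, using an illustrative enum and model rather than the repository's real order classes:

```python
from enum import StrEnum

import pytest
from pydantic import BaseModel, ValidationError

class Priority(StrEnum):
    # Illustrative subset; the real option list lives in the order models.
    STANDARD = "standard"
    RESEARCH = "research"

class CaseStub(BaseModel):
    name: str
    priority: Priority

def test_sentinel_priority_is_rejected() -> None:
    # "FAIL" is not a Priority member, so validation raises.
    with pytest.raises(ValidationError):
        CaseStub(name="B", priority="FAIL")
```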
diff --git a/tests/fixtures/orderforms/1508.32.balsamic.xlsx b/tests/fixtures/orderforms/1508.32.balsamic.xlsx
deleted file mode 100644
index 77e4a866f2..0000000000
Binary files a/tests/fixtures/orderforms/1508.32.balsamic.xlsx and /dev/null differ
diff --git a/tests/fixtures/orderforms/1508.32.balsamic_qc.xlsx b/tests/fixtures/orderforms/1508.32.balsamic_qc.xlsx
deleted file mode 100644
index 6d4c353546..0000000000
Binary files a/tests/fixtures/orderforms/1508.32.balsamic_qc.xlsx and /dev/null differ
diff --git a/tests/fixtures/orderforms/1508.32.balsamic_umi.xlsx b/tests/fixtures/orderforms/1508.32.balsamic_umi.xlsx
deleted file mode 100644
index 1bab195794..0000000000
Binary files a/tests/fixtures/orderforms/1508.32.balsamic_umi.xlsx and /dev/null differ
diff --git a/tests/fixtures/orderforms/1508.32.fastq.xlsx b/tests/fixtures/orderforms/1508.32.fastq.xlsx
deleted file mode 100644
index c22c334d01..0000000000
Binary files a/tests/fixtures/orderforms/1508.32.fastq.xlsx and /dev/null differ
diff --git a/tests/fixtures/orderforms/1508.32.metagenome.xlsx b/tests/fixtures/orderforms/1508.32.metagenome.xlsx
deleted file mode 100644
index b2f2382dfa..0000000000
Binary files a/tests/fixtures/orderforms/1508.32.metagenome.xlsx and /dev/null differ
diff --git a/tests/fixtures/orderforms/1508.32.mip.xlsx b/tests/fixtures/orderforms/1508.32.mip.xlsx
deleted file mode 100644
index a737db45b6..0000000000
Binary files a/tests/fixtures/orderforms/1508.32.mip.xlsx and /dev/null differ
diff --git a/tests/fixtures/orderforms/1508.32.mip_rna.xlsx b/tests/fixtures/orderforms/1508.32.mip_rna.xlsx
deleted file mode 100644
index 0d1d530ef4..0000000000
Binary files a/tests/fixtures/orderforms/1508.32.mip_rna.xlsx and /dev/null differ
diff --git a/tests/fixtures/orderforms/1508.32.rnafusion.xlsx b/tests/fixtures/orderforms/1508.32.rnafusion.xlsx
deleted file mode 100644
index d434221c2d..0000000000
Binary files a/tests/fixtures/orderforms/1508.32.rnafusion.xlsx and /dev/null differ
diff --git a/tests/fixtures/orderforms/1508.32.taxprofiler.xlsx b/tests/fixtures/orderforms/1508.32.taxprofiler.xlsx
deleted file mode 100644
index 74492b59a8..0000000000
Binary files a/tests/fixtures/orderforms/1508.32.taxprofiler.xlsx and /dev/null differ
diff --git a/tests/fixtures/orderforms/1508.32.tomte.xlsx b/tests/fixtures/orderforms/1508.32.tomte.xlsx
deleted file mode 100644
index c917491e18..0000000000
Binary files a/tests/fixtures/orderforms/1508.32.tomte.xlsx and /dev/null differ
diff --git a/tests/fixtures/orderforms/1508.33.balsamic.xlsx b/tests/fixtures/orderforms/1508.33.balsamic.xlsx
new file mode 100644
index 0000000000..4ff595492b
Binary files /dev/null and b/tests/fixtures/orderforms/1508.33.balsamic.xlsx differ
diff --git a/tests/fixtures/orderforms/1508.33.balsamic_qc.xlsx b/tests/fixtures/orderforms/1508.33.balsamic_qc.xlsx
new file mode 100644
index 0000000000..f39c09efd1
Binary files /dev/null and b/tests/fixtures/orderforms/1508.33.balsamic_qc.xlsx differ
diff --git a/tests/fixtures/orderforms/1508.33.balsamic_umi.xlsx b/tests/fixtures/orderforms/1508.33.balsamic_umi.xlsx
new file mode 100644
index 0000000000..678e3793ea
Binary files /dev/null and b/tests/fixtures/orderforms/1508.33.balsamic_umi.xlsx differ
diff --git a/tests/fixtures/orderforms/1508.33.fastq.xlsx b/tests/fixtures/orderforms/1508.33.fastq.xlsx
new file mode 100644
index 0000000000..c2e6024cd7
Binary files /dev/null and b/tests/fixtures/orderforms/1508.33.fastq.xlsx differ
diff --git a/tests/fixtures/orderforms/1508.33.metagenome.xlsx b/tests/fixtures/orderforms/1508.33.metagenome.xlsx
new file mode 100644
index 0000000000..571575c573
Binary files /dev/null and b/tests/fixtures/orderforms/1508.33.metagenome.xlsx differ
diff --git a/tests/fixtures/orderforms/1508.33.mip.xlsx b/tests/fixtures/orderforms/1508.33.mip.xlsx
new file mode 100644
index 0000000000..0d4551f04c
Binary files /dev/null and b/tests/fixtures/orderforms/1508.33.mip.xlsx differ
diff --git a/tests/fixtures/orderforms/1508.33.mip_rna.xlsx b/tests/fixtures/orderforms/1508.33.mip_rna.xlsx
new file mode 100644
index 0000000000..bf07d816b3
Binary files /dev/null and b/tests/fixtures/orderforms/1508.33.mip_rna.xlsx differ
diff --git a/tests/fixtures/orderforms/1508.33.rnafusion.xlsx b/tests/fixtures/orderforms/1508.33.rnafusion.xlsx
new file mode 100644
index 0000000000..a9fadc3f3c
Binary files /dev/null and b/tests/fixtures/orderforms/1508.33.rnafusion.xlsx differ
diff --git a/tests/fixtures/orderforms/1508.33.taxprofiler.xlsx b/tests/fixtures/orderforms/1508.33.taxprofiler.xlsx
new file mode 100644
index 0000000000..b8df363e76
Binary files /dev/null and b/tests/fixtures/orderforms/1508.33.taxprofiler.xlsx differ
diff --git a/tests/fixtures/orderforms/1508.33.tomte.xlsx b/tests/fixtures/orderforms/1508.33.tomte.xlsx
new file mode 100644
index 0000000000..705d0ad7b3
Binary files /dev/null and b/tests/fixtures/orderforms/1508.33.tomte.xlsx differ
diff --git a/tests/fixtures/orderforms/2184.10.sarscov2.xlsx b/tests/fixtures/orderforms/2184.10.sarscov2.xlsx
new file mode 100644
index 0000000000..80b49b58a3
Binary files /dev/null and b/tests/fixtures/orderforms/2184.10.sarscov2.xlsx differ
diff --git a/tests/fixtures/orderforms/2184.9.sarscov2.xlsx b/tests/fixtures/orderforms/2184.9.sarscov2.xlsx
deleted file mode 100644
index 6f4de53e5d..0000000000
Binary files a/tests/fixtures/orderforms/2184.9.sarscov2.xlsx and /dev/null differ
diff --git a/tests/meta/archive/test_archive_api.py b/tests/meta/archive/test_archive_api.py
index d33bde3e5d..61df598cc9 100644
--- a/tests/meta/archive/test_archive_api.py
+++ b/tests/meta/archive/test_archive_api.py
@@ -134,15 +134,18 @@ def test_call_corresponding_archiving_method(
sample=spring_archive_api.status_db.get_sample_by_internal_id(sample_id),
)
- with mock.patch.object(
- DDNDataFlowClient,
- "_set_auth_tokens",
- return_value=123,
- ), mock.patch.object(
- DDNDataFlowClient,
- "archive_file",
- return_value=123,
- ) as mock_request_submitter:
+ with (
+ mock.patch.object(
+ DDNDataFlowClient,
+ "_set_auth_tokens",
+ return_value=123,
+ ),
+ mock.patch.object(
+ DDNDataFlowClient,
+ "archive_file",
+ return_value=123,
+ ) as mock_request_submitter,
+ ):
# WHEN calling the corresponding archive method
spring_archive_api.archive_file_to_location(
file_and_sample=file_and_sample, archive_handler=ddn_dataflow_client
@@ -168,15 +171,18 @@ def test_archive_all_non_archived_spring_files(
# with the DDN customer having two samples, and the non-DDN having one sample.
# WHEN archiving all available files
- with mock.patch.object(
- AuthToken,
- "model_validate",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=ok_miria_response,
- ) as mock_request_submitter:
+ with (
+ mock.patch.object(
+ AuthToken,
+ "model_validate",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=ok_miria_response,
+ ) as mock_request_submitter,
+ ):
spring_archive_api.archive_spring_files_and_add_archives_to_housekeeper(
spring_file_count_limit=limit
)
@@ -233,18 +239,22 @@ def test_get_archival_status(
spring_archive_api.housekeeper_api.add_archives(files=[file], archive_task_id=archival_job_id)
# WHEN querying the task id and getting a "COMPLETED" response
- with mock.patch.object(
- AuthToken,
- "model_validate",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=ok_miria_job_status_response,
- ), mock.patch.object(
- DDNDataFlowClient,
- "_get_job_status",
- return_value=GetJobStatusResponse(id=archival_job_id, status=job_status),
+ with (
+ mock.patch.object(
+ AuthToken,
+ "model_validate",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=ok_miria_job_status_response,
+ ),
+ mock.patch.object(
+ DDNDataFlowClient,
+ "_get_job_status",
+ return_value=GetJobStatusResponse(id=archival_job_id, status=job_status),
+ ),
):
spring_archive_api.update_ongoing_task(
task_id=archival_job_id,
@@ -291,18 +301,22 @@ def test_get_retrieval_status(
)
# WHEN querying the task id
- with mock.patch.object(
- AuthToken,
- "model_validate",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=ok_miria_job_status_response,
- ), mock.patch.object(
- DDNDataFlowClient,
- "_get_job_status",
- return_value=GetJobStatusResponse(id=retrieval_job_id, status=job_status),
+ with (
+ mock.patch.object(
+ AuthToken,
+ "model_validate",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=ok_miria_job_status_response,
+ ),
+ mock.patch.object(
+ DDNDataFlowClient,
+ "_get_job_status",
+ return_value=GetJobStatusResponse(id=retrieval_job_id, status=job_status),
+ ),
):
spring_archive_api.update_ongoing_task(
task_id=retrieval_job_id,
@@ -345,15 +359,18 @@ def test_retrieve_case(
sample: Sample = spring_archive_api.status_db.get_sample_by_internal_id(sample_with_spring_file)
# WHEN archiving all available files
- with mock.patch.object(
- AuthToken,
- "model_validate",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=ok_miria_response,
- ) as mock_request_submitter:
+ with (
+ mock.patch.object(
+ AuthToken,
+ "model_validate",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=ok_miria_response,
+ ) as mock_request_submitter,
+ ):
spring_archive_api.retrieve_spring_files_for_case(sample.links[0].case.internal_id)
retrieve_request_json["pathInfo"][0]["source"] += "/" + Path(files[0].path).name
@@ -400,15 +417,18 @@ def test_retrieve_sample(
assert file.archive
# WHEN archiving all available files
- with mock.patch.object(
- AuthToken,
- "model_validate",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=ok_miria_response,
- ) as mock_request_submitter:
+ with (
+ mock.patch.object(
+ AuthToken,
+ "model_validate",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=ok_miria_response,
+ ) as mock_request_submitter,
+ ):
spring_archive_api.retrieve_spring_files_for_sample(sample_with_spring_file)
retrieve_request_json["pathInfo"][0]["source"] += "/" + Path(files[0].path).name
@@ -457,15 +477,18 @@ def test_retrieve_order(
sample: Sample = spring_archive_api.status_db.get_sample_by_internal_id(sample_with_spring_file)
# WHEN archiving all available files
- with mock.patch.object(
- AuthToken,
- "model_validate",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=ok_miria_response,
- ) as mock_request_submitter:
+ with (
+ mock.patch.object(
+ AuthToken,
+ "model_validate",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=ok_miria_response,
+ ) as mock_request_submitter,
+ ):
spring_archive_api.retrieve_spring_files_for_order(
id_=sample.original_ticket, is_order_id=False
)
@@ -509,16 +532,18 @@ def test_delete_file_raises_http_error(
)
# GIVEN that the request returns a failed response
- with mock.patch.object(
- DDNDataFlowClient,
- "_get_auth_token",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=failed_delete_file_response,
- ), pytest.raises(
- HTTPError
+ with (
+ mock.patch.object(
+ DDNDataFlowClient,
+ "_get_auth_token",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=failed_delete_file_response,
+ ),
+ pytest.raises(HTTPError),
):
# WHEN trying to delete the file via Miria and in Housekeeper
@@ -551,14 +576,17 @@ def test_delete_file_success(
)
# GIVEN that the delete request returns a successful response
- with mock.patch.object(
- DDNDataFlowClient,
- "_get_auth_token",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=ok_delete_file_response,
+ with (
+ mock.patch.object(
+ DDNDataFlowClient,
+ "_get_auth_token",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=ok_delete_file_response,
+ ),
):
# WHEN trying to delete the file via Miria and in Housekeeper
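
All of the hunks above are a pure reformat: the comma-chained `with a(), b():` form becomes a parenthesized group, one context manager per line, which Python documents as supported grammar from 3.10. A minimal standalone illustration of the pattern (class and names invented for the example):

```python
from unittest import mock

class Service:
    def token(self) -> str:
        return "real"

    def send(self) -> str:
        return "real"

# Parenthesized context managers: one per line, trailing comma allowed,
# and `as` aliases work exactly as in the unparenthesized form.
with (
    mock.patch.object(Service, "token", return_value="fake-token"),
    mock.patch.object(Service, "send", return_value="ok") as mock_send,
):
    assert Service().send() == "ok"
assert mock_send.called
```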
diff --git a/tests/meta/archive/test_archive_cli.py b/tests/meta/archive/test_archive_cli.py
index 6a7cfa08a2..876f9d83bb 100644
--- a/tests/meta/archive/test_archive_cli.py
+++ b/tests/meta/archive/test_archive_cli.py
@@ -10,11 +10,7 @@
from cg.constants import EXIT_SUCCESS, SequencingFileTag
from cg.constants.archiving import ArchiveLocations
from cg.io.controller import APIRequest
-from cg.meta.archive.ddn.constants import (
- FAILED_JOB_STATUSES,
- ONGOING_JOB_STATUSES,
- JobStatus,
-)
+from cg.meta.archive.ddn.constants import FAILED_JOB_STATUSES, ONGOING_JOB_STATUSES, JobStatus
from cg.meta.archive.ddn.ddn_data_flow_client import DDNDataFlowClient
from cg.meta.archive.ddn.models import ArchivalResponse, AuthToken, GetJobStatusResponse
from cg.models.cg_config import CGConfig
@@ -61,16 +57,20 @@ def test_archive_spring_files_success(
spring_file: File = all_non_archived_spring_files[0]
# WHEN running 'cg archive archive-spring-files'
- with mock.patch.object(
- AuthToken,
- "model_validate",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=ok_miria_response,
- ), mock.patch.object(
- DDNDataFlowClient, "_archive_file", return_value=ArchivalResponse(jobId=archival_job_id)
+ with (
+ mock.patch.object(
+ AuthToken,
+ "model_validate",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=ok_miria_response,
+ ),
+ mock.patch.object(
+ DDNDataFlowClient, "_archive_file", return_value=ArchivalResponse(jobId=archival_job_id)
+ ),
):
result = cli_runner.invoke(
archive_spring_files,
@@ -105,18 +105,22 @@ def test_get_archival_job_status(
assert not archive_context.housekeeper_api.get_archive_entries()[0].archived_at
# WHEN invoking update_job_statuses
- with mock.patch.object(
- AuthToken,
- "model_validate",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=ok_miria_response,
- ), mock.patch.object(
- DDNDataFlowClient,
- "_get_job_status",
- return_value=GetJobStatusResponse(id=archival_job_id, status=job_status),
+ with (
+ mock.patch.object(
+ AuthToken,
+ "model_validate",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=ok_miria_response,
+ ),
+ mock.patch.object(
+ DDNDataFlowClient,
+ "_get_job_status",
+ return_value=GetJobStatusResponse(id=archival_job_id, status=job_status),
+ ),
):
result = cli_runner.invoke(
update_job_statuses,
@@ -162,18 +166,22 @@ def test_get_retrieval_job_status(
retrieving_archive.retrieval_task_id = retrieval_job_id
# WHEN invoking update_job_statuses
- with mock.patch.object(
- AuthToken,
- "model_validate",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=ok_miria_response,
- ), mock.patch.object(
- DDNDataFlowClient,
- "_get_job_status",
- return_value=GetJobStatusResponse(id=retrieval_job_id, status=job_status),
+ with (
+ mock.patch.object(
+ AuthToken,
+ "model_validate",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=ok_miria_response,
+ ),
+ mock.patch.object(
+ DDNDataFlowClient,
+ "_get_job_status",
+ return_value=GetJobStatusResponse(id=retrieval_job_id, status=job_status),
+ ),
):
result = cli_runner.invoke(
update_job_statuses,
@@ -220,16 +228,18 @@ def test_delete_file_raises_http_error(
)
# GIVEN that the request returns a failed response
- with mock.patch.object(
- DDNDataFlowClient,
- "_get_auth_token",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=failed_delete_file_response,
- ), pytest.raises(
- HTTPError
+ with (
+ mock.patch.object(
+ DDNDataFlowClient,
+ "_get_auth_token",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=failed_delete_file_response,
+ ),
+ pytest.raises(HTTPError),
):
# WHEN trying to delete the file via Miria and in Housekeeper
@@ -265,14 +275,17 @@ def test_delete_file_success(
)
# GIVEN that the delete request returns a successful response
- with mock.patch.object(
- DDNDataFlowClient,
- "_get_auth_token",
- return_value=test_auth_token,
- ), mock.patch.object(
- APIRequest,
- "api_request_from_content",
- return_value=ok_delete_file_response,
+ with (
+ mock.patch.object(
+ DDNDataFlowClient,
+ "_get_auth_token",
+ return_value=test_auth_token,
+ ),
+ mock.patch.object(
+ APIRequest,
+ "api_request_from_content",
+ return_value=ok_delete_file_response,
+ ),
):
# WHEN trying to delete the file via Miria and in Housekeeper
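
The same two or three patches recur in nearly every test in these files. One alternative (not what this diff does) would be to fold them into a shared helper built on `contextlib.ExitStack`; a sketch using imports already present in these test modules:

```python
from contextlib import ExitStack, contextmanager
from unittest import mock

from cg.io.controller import APIRequest
from cg.meta.archive.ddn.models import AuthToken

@contextmanager
def patched_miria(token, response):
    """Bundle the auth-token and request patches most Miria tests repeat."""
    with ExitStack() as stack:
        stack.enter_context(
            mock.patch.object(AuthToken, "model_validate", return_value=token)
        )
        request_submitter = stack.enter_context(
            mock.patch.object(APIRequest, "api_request_from_content", return_value=response)
        )
        yield request_submitter
```

Tests would then shrink to `with patched_miria(test_auth_token, ok_miria_response) as mock_request_submitter:`, at the cost of hiding which attributes are patched.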
diff --git a/tests/meta/orders/conftest.py b/tests/meta/orders/conftest.py
deleted file mode 100644
index 5857e67a8a..0000000000
--- a/tests/meta/orders/conftest.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from pathlib import Path
-
-import pytest
-
-from cg.clients.freshdesk.freshdesk_client import FreshdeskClient
-from cg.meta.orders import OrdersAPI
-from cg.meta.orders.ticket_handler import TicketHandler
-from cg.services.orders.submitters.order_submitter_registry import (
- OrderSubmitterRegistry,
- setup_order_submitter_registry,
-)
-from cg.store.store import Store
-from tests.mocks.limsmock import MockLimsAPI
-
-
-@pytest.fixture
-def freshdesk_client():
- return FreshdeskClient(base_url="https://mock.freshdesk.com", api_key="mock_api_key")
-
-
-@pytest.fixture(scope="function")
-def orders_api(
- base_store: Store,
- ticket_handler: TicketHandler,
- lims_api: MockLimsAPI,
- order_submitter_registry: OrderSubmitterRegistry,
-) -> OrdersAPI:
- return OrdersAPI(
- lims=lims_api,
- status=base_store,
- ticket_handler=ticket_handler,
- submitter_registry=order_submitter_registry,
- )
-
-
-@pytest.fixture
-def ticket_handler(store: Store, freshdesk_client: FreshdeskClient) -> TicketHandler:
- return TicketHandler(db=store, client=freshdesk_client, system_email_id=12345, env="production")
-
-
-@pytest.fixture
-def order_submitter_registry(base_store: Store, lims_api: MockLimsAPI) -> OrderSubmitterRegistry:
- return setup_order_submitter_registry(lims=lims_api, status_db=base_store)
diff --git a/tests/meta/orders/test_meta_orders_api.py b/tests/meta/orders/test_meta_orders_api.py
deleted file mode 100644
index 04ffe40588..0000000000
--- a/tests/meta/orders/test_meta_orders_api.py
+++ /dev/null
@@ -1,624 +0,0 @@
-import datetime as dt
-from unittest.mock import Mock, patch
-
-import pytest
-
-from cg.clients.freshdesk.models import TicketResponse
-from cg.constants import DataDelivery
-from cg.constants.constants import Workflow
-from cg.constants.subject import Sex
-from cg.exc import OrderError, TicketCreationError
-from cg.meta.orders import OrdersAPI
-from cg.models.orders.order import OrderIn, OrderType
-from cg.models.orders.samples import MipDnaSample
-from cg.services.orders.validate_order_services.validate_case_order import (
- ValidateCaseOrderService,
-)
-from cg.store.models import Case, Customer, Pool, Sample
-from cg.store.store import Store
-from tests.store_helpers import StoreHelpers
-
-
-def monkeypatch_process_lims(monkeypatch, order_data) -> None:
- lims_project_data = {"id": "ADM1234", "date": dt.datetime.now()}
- lims_map = {sample.name: f"ELH123A{index}" for index, sample in enumerate(order_data.samples)}
- monkeypatch.setattr(
- "cg.services.orders.order_lims_service.order_lims_service.OrderLimsService.process_lims",
- lambda *args, **kwargs: (lims_project_data, lims_map),
- )
-
-
-def mock_freshdesk_ticket_creation(mock_create_ticket, ticket_id: str):
- """Helper function to mock Freshdesk ticket creation."""
- mock_create_ticket.return_value = TicketResponse(
- id=int(ticket_id),
- description="This is a test description.",
- subject="Support needed..",
- status=2,
- priority=1,
- )
-
-
-def mock_freshdesk_reply_to_ticket(mock_reply_to_ticket):
- """Helper function to mock Freshdesk reply to ticket."""
- mock_reply_to_ticket.return_value = None
-
-
-def test_too_long_order_name():
-    # GIVEN an order with a name that exceeds the allowed length
- long_name = "A super long order name that is longer than sixty-four characters."
- assert len(long_name) > Sample.order.property.columns[0].type.length
-
- # WHEN placing it in the pydantic order model
- # THEN an error is raised
- with pytest.raises(ValueError):
- OrderIn(name=long_name, customer="", comment="", samples=[])
-
-
-@pytest.mark.parametrize(
- "order_type",
- [
- OrderType.BALSAMIC,
- OrderType.FASTQ,
- OrderType.FLUFFY,
- OrderType.METAGENOME,
- OrderType.MICROSALT,
- OrderType.MIP_DNA,
- OrderType.MIP_RNA,
- OrderType.RML,
- OrderType.RNAFUSION,
- OrderType.SARS_COV_2,
- ],
-)
-def test_submit(
- all_orders_to_submit: dict,
- base_store: Store,
- monkeypatch: pytest.MonkeyPatch,
- order_type: OrderType,
- orders_api: OrdersAPI,
- ticket_id: str,
- user_mail: str,
- user_name: str,
-):
- with patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.create_ticket"
- ) as mock_create_ticket, patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.reply_to_ticket"
- ) as mock_reply_to_ticket:
- mock_freshdesk_ticket_creation(mock_create_ticket, ticket_id)
- mock_freshdesk_reply_to_ticket(mock_reply_to_ticket)
-
- order_data = OrderIn.parse_obj(obj=all_orders_to_submit[order_type], project=order_type)
- monkeypatch_process_lims(monkeypatch, order_data)
-
- # GIVEN an order and an empty store
- assert not base_store._get_query(table=Sample).first()
-
- # WHEN submitting the order
-
- result = orders_api.submit(
- project=order_type, order_in=order_data, user_name=user_name, user_mail=user_mail
- )
-
- # THEN the result should contain the ticket number for the order
- for record in result["records"]:
- if isinstance(record, Pool):
- assert record.ticket == ticket_id
- elif isinstance(record, Sample):
- assert record.original_ticket == ticket_id
- elif isinstance(record, Case):
- for link_obj in record.links:
- assert link_obj.sample.original_ticket == ticket_id
-
-
-@pytest.mark.parametrize(
- "order_type",
- [OrderType.MIP_DNA, OrderType.MIP_RNA, OrderType.BALSAMIC],
-)
-def test_submit_ticketexception(
- all_orders_to_submit,
- orders_api: OrdersAPI,
- order_type: OrderType,
- user_mail: str,
- user_name: str,
-):
- # GIVEN a mock Freshdesk ticket creation that raises TicketCreationError
- with patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.create_ticket",
- side_effect=TicketCreationError("ERROR"),
- ):
- # GIVEN an order that does not have a name (ticket_nr)
- order_data = OrderIn.parse_obj(obj=all_orders_to_submit[order_type], project=order_type)
- order_data.name = "dummy_name"
-
-        # WHEN the order is submitted and a TicketCreationError is raised
-        # THEN the TicketCreationError propagates instead of being caught
- with pytest.raises(TicketCreationError):
- orders_api.submit(
- project=order_type,
- order_in=order_data,
- user_name=user_name,
- user_mail=user_mail,
- )
-
-
-@pytest.mark.parametrize(
- "order_type",
- [OrderType.MIP_DNA, OrderType.MIP_RNA, OrderType.BALSAMIC],
-)
-def test_submit_illegal_sample_customer(
- all_orders_to_submit: dict,
- monkeypatch: pytest.MonkeyPatch,
- order_type: OrderType,
- orders_api: OrdersAPI,
- sample_store: Store,
- user_mail: str,
- user_name: str,
-):
- order_data = OrderIn.parse_obj(obj=all_orders_to_submit[order_type], project=order_type)
- monkeypatch_process_lims(monkeypatch, order_data)
-    # GIVEN we have an order with a customer that is not in the same customer group as the customer
-    # that the samples originate from
- new_customer = sample_store.add_customer(
- "customer999",
- "customer 999",
- scout_access=True,
- invoice_address="dummy street",
- invoice_reference="dummy nr",
- )
- sample_store.session.add(new_customer)
- existing_sample: Sample = sample_store._get_query(table=Sample).first()
- existing_sample.customer = new_customer
- sample_store.session.add(existing_sample)
- sample_store.session.commit()
- for sample in order_data.samples:
- sample.internal_id = existing_sample.internal_id
-
- # WHEN calling submit
- # THEN an OrderError should be raised on illegal customer
- with pytest.raises(OrderError):
- orders_api.submit(
- project=order_type,
- order_in=order_data,
- user_name=user_name,
- user_mail=user_mail,
- )
-
-
-@pytest.mark.parametrize(
- "order_type",
- [OrderType.MIP_DNA, OrderType.MIP_RNA, OrderType.BALSAMIC],
-)
-def test_submit_scout_legal_sample_customer(
- all_orders_to_submit: dict,
- monkeypatch: pytest.MonkeyPatch,
- order_type: OrderType,
- orders_api: OrdersAPI,
- sample_store: Store,
- user_mail: str,
- user_name: str,
- ticket_id: str,
-):
- with patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.create_ticket"
- ) as mock_create_ticket, patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.reply_to_ticket"
- ) as mock_reply_to_ticket:
- mock_freshdesk_ticket_creation(mock_create_ticket, ticket_id)
- mock_freshdesk_reply_to_ticket(mock_reply_to_ticket)
- order_data = OrderIn.parse_obj(obj=all_orders_to_submit[order_type], project=order_type)
- monkeypatch_process_lims(monkeypatch, order_data)
-        # GIVEN we have an order with a customer that is in the same customer group as the customer
-        # that the samples originate from
- collaboration = sample_store.add_collaboration("customer999only", "customer 999 only group")
- sample_store.session.add(collaboration)
- sample_customer = sample_store.add_customer(
- "customer1",
- "customer 1",
- scout_access=True,
- invoice_address="dummy street 1",
- invoice_reference="dummy nr",
- )
- order_customer = sample_store.add_customer(
- "customer2",
- "customer 2",
- scout_access=True,
- invoice_address="dummy street 2",
- invoice_reference="dummy nr",
- )
- sample_customer.collaborations.append(collaboration)
- order_customer.collaborations.append(collaboration)
- sample_store.session.add(sample_customer)
- sample_store.session.add(order_customer)
- existing_sample: Sample = sample_store._get_query(table=Sample).first()
- existing_sample.customer = sample_customer
- sample_store.session.commit()
- order_data.customer = order_customer.internal_id
-
- for sample in order_data.samples:
- sample.internal_id = existing_sample.internal_id
- break
-
- # WHEN calling submit
- # THEN an OrderError should not be raised on illegal customer
- orders_api.submit(
- project=order_type, order_in=order_data, user_name=user_name, user_mail=user_mail
- )
-
-
-@pytest.mark.parametrize(
- "order_type",
- [OrderType.MIP_DNA, OrderType.MIP_RNA, OrderType.BALSAMIC],
-)
-def test_submit_duplicate_sample_case_name(
- all_orders_to_submit: dict,
- monkeypatch: pytest.MonkeyPatch,
- order_type: OrderType,
- orders_api: OrdersAPI,
- ticket_id: str,
- user_mail: str,
- user_name: str,
-):
- # GIVEN we have an order with a case that is already in the database
- order_data = OrderIn.parse_obj(obj=all_orders_to_submit[order_type], project=order_type)
- store = orders_api.status
- customer: Customer = store.get_customer_by_internal_id(customer_internal_id=order_data.customer)
- for sample in order_data.samples:
- case_id = sample.family_name
- if not store.get_case_by_name_and_customer(customer=customer, case_name=case_id):
- case: Case = store.add_case(
- data_analysis=Workflow.MIP_DNA,
- data_delivery=DataDelivery.SCOUT,
- name=case_id,
- ticket=ticket_id,
- )
- case.customer = customer
- store.session.add(case)
- store.session.commit()
- assert store.get_case_by_name_and_customer(customer=customer, case_name=case_id)
-
- monkeypatch_process_lims(monkeypatch, order_data)
-
- # WHEN calling submit
- # THEN an OrderError should be raised on duplicate case name
- with pytest.raises(OrderError):
- orders_api.submit(
- project=order_type,
- order_in=order_data,
- user_name=user_name,
- user_mail=user_mail,
- )
-
-
-@pytest.mark.parametrize(
- "order_type",
- [OrderType.FLUFFY],
-)
-def test_submit_fluffy_duplicate_sample_case_name(
- all_orders_to_submit: dict,
- monkeypatch: pytest.MonkeyPatch,
- order_type: OrderType,
- orders_api: OrdersAPI,
- user_mail: str,
- user_name: str,
- ticket_id: str,
-):
- with patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.create_ticket"
- ) as mock_create_ticket, patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.reply_to_ticket"
- ) as mock_reply_to_ticket:
- mock_freshdesk_ticket_creation(mock_create_ticket, ticket_id)
- mock_freshdesk_reply_to_ticket(mock_reply_to_ticket)
- # GIVEN we have an order with a case that is already in the database
- order_data = OrderIn.parse_obj(obj=all_orders_to_submit[order_type], project=order_type)
- monkeypatch_process_lims(monkeypatch, order_data)
-
- orders_api.submit(
- project=order_type, order_in=order_data, user_name=user_name, user_mail=user_mail
- )
-
- # WHEN calling submit
- # THEN an OrderError should be raised on duplicate case name
- with pytest.raises(OrderError):
- orders_api.submit(
- project=order_type,
- order_in=order_data,
- user_name=user_name,
- user_mail=user_mail,
- )
-
-
-def test_submit_unique_sample_case_name(
- orders_api: OrdersAPI,
- mip_order_to_submit: dict,
- user_name: str,
- user_mail: str,
- monkeypatch: pytest.MonkeyPatch,
- ticket_id: str,
-):
- with patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.create_ticket"
- ) as mock_create_ticket, patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.reply_to_ticket"
- ) as mock_reply_to_ticket:
- mock_freshdesk_ticket_creation(mock_create_ticket, ticket_id)
- mock_freshdesk_reply_to_ticket(mock_reply_to_ticket)
-
- # GIVEN we have an order with a case that is not existing in the database
- order_data = OrderIn.parse_obj(obj=mip_order_to_submit, project=OrderType.MIP_DNA)
-
- store = orders_api.status
-
- sample: MipDnaSample
- for sample in order_data.samples:
- case_id = sample.family_name
- customer: Customer = store.get_customer_by_internal_id(
- customer_internal_id=order_data.customer
- )
- assert not store.get_case_by_name_and_customer(customer=customer, case_name=case_id)
-
- monkeypatch_process_lims(monkeypatch, order_data)
-
- # WHEN calling submit
- orders_api.submit(
- project=OrderType.MIP_DNA,
- order_in=order_data,
- user_name=user_name,
- user_mail=user_mail,
- )
-
- # Then no exception about duplicate names should be thrown
-
-
-def test_validate_sex_inconsistent_sex(
- orders_api: OrdersAPI, mip_order_to_submit: dict, helpers: StoreHelpers
-):
- # GIVEN we have an order with a sample that is already in the database but with different sex
- order_data = OrderIn.parse_obj(mip_order_to_submit, project=OrderType.MIP_DNA)
- store = orders_api.status
- customer: Customer = store.get_customer_by_internal_id(customer_internal_id=order_data.customer)
-
- # add sample with different sex than in order
- sample: MipDnaSample
- for sample in order_data.samples:
- sample_obj: Sample = helpers.add_sample(
- store=store,
- customer_id=customer.internal_id,
- sex=Sex.MALE if sample.sex == Sex.FEMALE else Sex.FEMALE,
- name=sample.name,
- subject_id=sample.subject_id,
- )
- store.session.add(sample_obj)
- store.session.commit()
- assert sample_obj.sex != sample.sex
-
- validator = ValidateCaseOrderService(status_db=orders_api.status)
-
- # WHEN calling _validate_sex
- # THEN an OrderError should be raised on non-matching sex
- with pytest.raises(OrderError):
- validator._validate_subject_sex(samples=order_data.samples, customer_id=order_data.customer)
-
-
-def test_validate_sex_consistent_sex(
- orders_api: OrdersAPI, mip_order_to_submit: dict, helpers: StoreHelpers
-):
- # GIVEN we have an order with a sample that is already in the database and with same gender
- order_data = OrderIn.parse_obj(mip_order_to_submit, project=OrderType.MIP_DNA)
- store = orders_api.status
- customer: Customer = store.get_customer_by_internal_id(customer_internal_id=order_data.customer)
-
- # add sample with different sex than in order
- sample: MipDnaSample
- for sample in order_data.samples:
- sample_obj: Sample = helpers.add_sample(
- store=store,
- customer_id=customer.internal_id,
- sex=sample.sex,
- name=sample.name,
- subject_id=sample.subject_id,
- )
- store.session.add(sample_obj)
- store.session.commit()
- assert sample_obj.sex == sample.sex
-
- validator = ValidateCaseOrderService(status_db=orders_api.status)
-
- # WHEN calling _validate_sex
- validator._validate_subject_sex(samples=order_data.samples, customer_id=order_data.customer)
-
- # THEN no OrderError should be raised on non-matching sex
-
-
-def test_validate_sex_unknown_existing_sex(
- orders_api: OrdersAPI, mip_order_to_submit: dict, helpers: StoreHelpers
-):
- # GIVEN we have an order with a sample that is already in the database and with different gender but the existing is
- # of type "unknown"
- order_data = OrderIn.parse_obj(mip_order_to_submit, project=OrderType.MIP_DNA)
- store = orders_api.status
- customer: Customer = store.get_customer_by_internal_id(customer_internal_id=order_data.customer)
-
- # add sample with different sex than in order
- sample: MipDnaSample
- for sample in order_data.samples:
- sample_obj: Sample = helpers.add_sample(
- store=store,
- customer_id=customer.internal_id,
- sex=Sex.UNKNOWN,
- name=sample.name,
- subject_id=sample.subject_id,
- )
- store.session.add(sample_obj)
- store.session.commit()
- assert sample_obj.sex != sample.sex
-
- validator = ValidateCaseOrderService(status_db=orders_api.status)
-
- # WHEN calling _validate_sex
- validator._validate_subject_sex(samples=order_data.samples, customer_id=order_data.customer)
-
- # THEN no OrderError should be raised on non-matching sex
-
-
-def test_validate_sex_unknown_new_sex(
- orders_api: OrdersAPI, mip_order_to_submit: dict, helpers: StoreHelpers
-):
- # GIVEN we have an order with a sample that is already in the database and with different gender but the new is of
- # type "unknown"
- order_data = OrderIn.parse_obj(mip_order_to_submit, project=OrderType.MIP_DNA)
- store = orders_api.status
- customer: Customer = store.get_customer_by_internal_id(customer_internal_id=order_data.customer)
-
- # add sample with different sex than in order
- for sample in order_data.samples:
- sample_obj: Sample = helpers.add_sample(
- store=store,
- customer_id=customer.internal_id,
- sex=sample.sex,
- name=sample.name,
- subject_id=sample.subject_id,
- )
- sample.sex = "unknown"
- store.session.add(sample_obj)
- store.session.commit()
-
- for sample in order_data.samples:
- assert sample_obj.sex != sample.sex
-
- validator = ValidateCaseOrderService(status_db=orders_api.status)
-
- # WHEN calling _validate_sex
- validator._validate_subject_sex(samples=order_data.samples, customer_id=order_data.customer)
-
- # THEN no OrderError should be raised on non-matching sex
-
-
-@pytest.mark.parametrize(
- "order_type",
- [
- OrderType.BALSAMIC,
- OrderType.FASTQ,
- OrderType.FLUFFY,
- OrderType.METAGENOME,
- OrderType.MICROSALT,
- OrderType.MIP_DNA,
- OrderType.MIP_RNA,
- OrderType.RML,
- OrderType.SARS_COV_2,
- ],
-)
-def test_submit_unique_sample_name(
- all_orders_to_submit: dict,
- monkeypatch: pytest.MonkeyPatch,
- order_type: OrderType,
- orders_api: OrdersAPI,
- user_mail: str,
- user_name: str,
- ticket_id: str,
-):
- with patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.create_ticket"
- ) as mock_create_ticket, patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.reply_to_ticket"
- ) as mock_reply_to_ticket:
- mock_freshdesk_ticket_creation(mock_create_ticket, ticket_id)
- mock_freshdesk_reply_to_ticket(mock_reply_to_ticket)
- # GIVEN we have an order with a sample that is not existing in the database
- order_data = OrderIn.parse_obj(obj=all_orders_to_submit[order_type], project=order_type)
- store = orders_api.status
- assert not store._get_query(table=Sample).first()
-
- monkeypatch_process_lims(monkeypatch, order_data)
-
- # WHEN calling submit
- orders_api.submit(
- project=order_type, order_in=order_data, user_name=user_name, user_mail=user_mail
- )
-
- # Then no exception about duplicate names should be thrown
-
-
-@pytest.mark.parametrize(
- "order_type",
- [OrderType.SARS_COV_2, OrderType.METAGENOME],
-)
-def test_sarscov2_submit_duplicate_sample_name(
- all_orders_to_submit: dict,
- helpers: StoreHelpers,
- monkeypatch: pytest.MonkeyPatch,
- order_type: OrderType,
- orders_api: OrdersAPI,
- user_mail: str,
- user_name: str,
-):
- # GIVEN we have an order with samples that is already in the database
- order_data = OrderIn.parse_obj(obj=all_orders_to_submit[order_type], project=order_type)
- monkeypatch_process_lims(monkeypatch, order_data)
- store_samples_with_names_from_order(orders_api.status, helpers, order_data)
-
- # WHEN calling submit
- # THEN an OrderError should be raised on duplicate sample name
- with pytest.raises(OrderError):
- orders_api.submit(
- project=order_type,
- order_in=order_data,
- user_name=user_name,
- user_mail=user_mail,
- )
-
-
-def store_samples_with_names_from_order(store: Store, helpers: StoreHelpers, order_data: OrderIn):
- customer: Customer = store.get_customer_by_internal_id(customer_internal_id=order_data.customer)
- for sample in order_data.samples:
- sample_name = sample.name
- if not store.get_sample_by_customer_and_name(
- customer_entry_id=[customer.id], sample_name=sample_name
- ):
- sample_obj = helpers.add_sample(
- store=store, customer_id=customer.internal_id, name=sample_name
- )
- store.session.add(sample_obj)
- store.session.commit()
-
-
-@pytest.mark.parametrize(
- "order_type",
- [
- OrderType.BALSAMIC,
- OrderType.FASTQ,
- OrderType.MICROSALT,
- OrderType.MIP_DNA,
- OrderType.MIP_RNA,
- OrderType.RML,
- ],
-)
-def test_not_sarscov2_submit_duplicate_sample_name(
- all_orders_to_submit: dict,
- helpers: StoreHelpers,
- monkeypatch: pytest.MonkeyPatch,
- order_type: OrderType,
- orders_api: OrdersAPI,
- user_mail: str,
- user_name: str,
- ticket_id: str,
-):
- with patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.create_ticket"
- ) as mock_create_ticket, patch(
- "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.reply_to_ticket"
- ) as mock_reply_to_ticket:
- mock_freshdesk_ticket_creation(mock_create_ticket, ticket_id)
- mock_freshdesk_reply_to_ticket(mock_reply_to_ticket)
- # GIVEN we have an order with samples that is already in the database
- order_data = OrderIn.parse_obj(obj=all_orders_to_submit[order_type], project=order_type)
- monkeypatch_process_lims(monkeypatch, order_data)
- store_samples_with_names_from_order(orders_api.status, helpers, order_data)
-
- # WHEN calling submit
- orders_api.submit(
- project=order_type, order_in=order_data, user_name=user_name, user_mail=user_mail
- )
-
- # THEN no OrderError should be raised on duplicate sample name
diff --git a/tests/meta/workflow/test_analysis.py b/tests/meta/workflow/test_analysis.py
index f968a864b9..8f5e5b2279 100644
--- a/tests/meta/workflow/test_analysis.py
+++ b/tests/meta/workflow/test_analysis.py
@@ -9,7 +9,7 @@
from cg.constants import GenePanelMasterList, Priority, SequencingRunDataAvailability
from cg.constants.archiving import ArchiveLocations
from cg.constants.constants import ControlOptions
-from cg.constants.priority import SlurmQos
+from cg.constants.priority import SlurmQos, TrailblazerPriority
from cg.constants.sequencing import Sequencers
from cg.exc import AnalysisNotReadyError
from cg.meta.archive.archive import SpringArchiveAPI
@@ -17,9 +17,9 @@
from cg.meta.workflow.mip import MipAnalysisAPI
from cg.meta.workflow.mip_dna import MipDNAAnalysisAPI
from cg.meta.workflow.prepare_fastq import PrepareFastqAPI
-from cg.meta.workflow.utils.utils import are_all_samples_control
+from cg.meta.workflow.utils.utils import MAP_TO_TRAILBLAZER_PRIORITY, are_all_samples_control
from cg.models.fastq import FastqFileMeta
-from cg.store.models import Case, Sample, IlluminaSequencingRun
+from cg.store.models import Case, IlluminaSequencingRun, Sample
from cg.store.store import Store
from tests.store_helpers import StoreHelpers
@@ -55,6 +55,39 @@ def test_get_slurm_qos_for_case(
assert slurm_qos is expected_slurm_qos
+@pytest.mark.parametrize(
+ "priority,expected_trailblazer_priority",
+ [
+ (Priority.clinical_trials, TrailblazerPriority.NORMAL),
+ (Priority.research, TrailblazerPriority.LOW),
+ (Priority.standard, TrailblazerPriority.NORMAL),
+ (Priority.priority, TrailblazerPriority.HIGH),
+ (Priority.express, TrailblazerPriority.EXPRESS),
+ ],
+)
+def test_get_trailblazer_priority(
+ case_id: str,
+    priority: Priority,
+    expected_trailblazer_priority: TrailblazerPriority,
+ mip_analysis_api: MipDNAAnalysisAPI,
+ analysis_store: Store,
+):
+    """Test getting the Trailblazer priority from the case priority."""
+
+ # GIVEN a store with a case with a specific priority
+ mip_analysis_api.status_db = analysis_store
+ case: Case = analysis_store.get_case_by_internal_id(case_id)
+ case.priority = priority
+
+ # WHEN getting the trailblazer priority for the case
+ trailblazer_priority: TrailblazerPriority = mip_analysis_api.get_trailblazer_priority(
+ case_id=case_id
+ )
+
+ # THEN the expected trailblazer priority should be returned
+ assert trailblazer_priority is expected_trailblazer_priority
+
+
def test_gene_panels_not_part_of_master_list(customer_id: str):
"""Test get only broad non-specific gene panels and custom gene panel list if a supplied gene panels is not part of master list."""
# GIVEN a customer who is a collaborator on the master list
@@ -226,13 +259,16 @@ def test_ensure_illumina_runs_on_disk_does_not_request_runs(
# GIVEN a case
     # WHEN _is_illumina_run_check_applicable returns True and the attached flow cell is ON_DISK
- with mock.patch.object(
- AnalysisAPI,
- "_is_illumina_run_check_applicable",
- return_value=True,
- ), mock.patch.object(
- Store, "request_sequencing_runs_for_case", return_value=None
- ) as request_checker:
+ with (
+ mock.patch.object(
+ AnalysisAPI,
+ "_is_illumina_run_check_applicable",
+ return_value=True,
+ ),
+ mock.patch.object(
+ Store, "request_sequencing_runs_for_case", return_value=None
+ ) as request_checker,
+ ):
mip_analysis_api.ensure_illumina_run_on_disk(selected_novaseq_x_case_ids[0])
# THEN runs should not be requested for the case
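
A note on the recurring refactor in these hunks: comma-chained `mock.patch.object` context managers are rewritten into the parenthesized form enabled by Python 3.10's PEP 617 parser, so each patch sits on its own line with a trailing comma. A minimal sketch of the two equivalent spellings, using a toy `Service` class rather than anything from cg:

```python
from unittest import mock


class Service:
    def ping(self) -> str:
        return "pong"

    def fetch(self) -> list:
        return [1, 2, 3]


# Pre-3.10 spelling: one logical statement, continued with a backslash.
with mock.patch.object(Service, "ping", return_value="mocked"), \
        mock.patch.object(Service, "fetch", return_value=[]):
    assert Service().ping() == "mocked"

# 3.10+ spelling used throughout this diff: parenthesized context managers.
with (
    mock.patch.object(Service, "ping", return_value="mocked"),
    mock.patch.object(Service, "fetch", return_value=[]) as fetch_mock,
):
    assert Service().fetch() == []
    fetch_mock.assert_called_once()
```

Besides readability, the one-manager-per-line layout keeps future additions to a patch stack as single-line diffs.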
@@ -285,9 +321,10 @@ def test_is_case_ready_for_analysis_true(
# GIVEN a case and a flow cell with status ON_DISK
# GIVEN that no decompression is needed nor running
- with mock.patch.object(
- PrepareFastqAPI, "is_spring_decompression_needed", return_value=False
- ), mock.patch.object(PrepareFastqAPI, "is_spring_decompression_running", return_value=False):
+ with (
+ mock.patch.object(PrepareFastqAPI, "is_spring_decompression_needed", return_value=False),
+ mock.patch.object(PrepareFastqAPI, "is_spring_decompression_running", return_value=False),
+ ):
# WHEN running is_case_ready_for_analysis
# THEN the result should be true
@@ -305,9 +342,10 @@ def test_is_case_ready_for_analysis_decompression_needed(
# GIVEN a case and an Illumina run
# GIVEN that some files need to be decompressed
- with mock.patch.object(
- PrepareFastqAPI, "is_spring_decompression_needed", return_value=True
- ), mock.patch.object(PrepareFastqAPI, "is_spring_decompression_running", return_value=False):
+ with (
+ mock.patch.object(PrepareFastqAPI, "is_spring_decompression_needed", return_value=True),
+ mock.patch.object(PrepareFastqAPI, "is_spring_decompression_running", return_value=False),
+ ):
# WHEN running is_case_ready_for_analysis
# THEN the result should be false
assert not mip_analysis_api.is_raw_data_ready_for_analysis(selected_novaseq_x_case_ids[0])
@@ -324,9 +362,10 @@ def test_is_case_ready_for_analysis_decompression_running(
     # GIVEN a case and an Illumina sequencing run
# GIVEN that some files are being decompressed
- with mock.patch.object(
- PrepareFastqAPI, "is_spring_decompression_needed", return_value=False
- ), mock.patch.object(PrepareFastqAPI, "is_spring_decompression_running", return_value=True):
+ with (
+ mock.patch.object(PrepareFastqAPI, "is_spring_decompression_needed", return_value=False),
+ mock.patch.object(PrepareFastqAPI, "is_spring_decompression_running", return_value=True),
+ ):
# WHEN running is_case_ready_for_analysis
# THEN the result should be false
assert not mip_analysis_api.is_raw_data_ready_for_analysis(selected_novaseq_x_case_ids[0])
@@ -350,16 +389,21 @@ def test_prepare_fastq_files_success(
# GIVEN a case with an Illumina run ON_DISK
     # GIVEN that no decompression is needed or running and adding the files to Housekeeper goes well
- with mock.patch.object(
- PrepareFastqAPI,
- "is_spring_decompression_needed",
- return_value=is_spring_decompression_needed,
- ), mock.patch.object(MipAnalysisAPI, "decompress_case", return_value=None), mock.patch.object(
- PrepareFastqAPI,
- "is_spring_decompression_running",
- return_value=is_spring_decompression_running,
- ), mock.patch.object(
- PrepareFastqAPI, "add_decompressed_fastq_files_to_housekeeper", return_value=None
+ with (
+ mock.patch.object(
+ PrepareFastqAPI,
+ "is_spring_decompression_needed",
+ return_value=is_spring_decompression_needed,
+ ),
+ mock.patch.object(MipAnalysisAPI, "decompress_case", return_value=None),
+ mock.patch.object(
+ PrepareFastqAPI,
+ "is_spring_decompression_running",
+ return_value=is_spring_decompression_running,
+ ),
+ mock.patch.object(
+ PrepareFastqAPI, "add_decompressed_fastq_files_to_housekeeper", return_value=None
+ ),
):
# WHEN running prepare_fastq_files
if is_spring_decompression_running or is_spring_decompression_needed:
@@ -384,17 +428,17 @@ def test_prepare_fastq_files_request_miria(
case.customer.data_archive_location = ArchiveLocations.KAROLINSKA_BUCKET
# GIVEN that at least one file is archived and not retrieved
- with mock.patch.object(
- PrepareFastqAPI, "is_spring_decompression_needed", return_value=False
- ), mock.patch.object(
- PrepareFastqAPI, "is_spring_decompression_running", return_value=False
- ), mock.patch.object(
- PrepareFastqAPI, "add_decompressed_fastq_files_to_housekeeper", return_value=None
- ), mock.patch.object(
- SpringArchiveAPI, "retrieve_spring_files_for_case"
- ), mock.patch.object(
- AnalysisAPI, "is_raw_data_ready_for_analysis", return_value=False
- ) as request_submitter:
+ with (
+ mock.patch.object(PrepareFastqAPI, "is_spring_decompression_needed", return_value=False),
+ mock.patch.object(PrepareFastqAPI, "is_spring_decompression_running", return_value=False),
+ mock.patch.object(
+ PrepareFastqAPI, "add_decompressed_fastq_files_to_housekeeper", return_value=None
+ ),
+ mock.patch.object(SpringArchiveAPI, "retrieve_spring_files_for_case"),
+ mock.patch.object(
+ AnalysisAPI, "is_raw_data_ready_for_analysis", return_value=False
+ ) as request_submitter,
+ ):
with pytest.raises(AnalysisNotReadyError):
# WHEN running prepare_fastq_files
@@ -422,12 +466,12 @@ def test_prepare_fastq_files_does_not_request_miria(
case.customer.data_archive_location = ArchiveLocations.KAROLINSKA_BUCKET
# GIVEN that all Illumina runs have status on disk
- with mock.patch.object(
- PrepareFastqAPI, "is_spring_decompression_needed", return_value=False
- ), mock.patch.object(
- PrepareFastqAPI, "is_spring_decompression_running", return_value=False
- ), mock.patch.object(
- PrepareFastqAPI, "add_decompressed_fastq_files_to_housekeeper", return_value=None
+ with (
+ mock.patch.object(PrepareFastqAPI, "is_spring_decompression_needed", return_value=False),
+ mock.patch.object(PrepareFastqAPI, "is_spring_decompression_running", return_value=False),
+ mock.patch.object(
+ PrepareFastqAPI, "add_decompressed_fastq_files_to_housekeeper", return_value=None
+ ),
):
# WHEN running prepare_fastq_files
@@ -483,10 +527,13 @@ def test_ensure_files_are_present(
# WHEN ensuring that all files are present
- with mock.patch.object(AnalysisAPI, "ensure_illumina_run_on_disk"), mock.patch.object(
- SpringArchiveAPI,
- "retrieve_spring_files_for_case",
- ) as request_submitter:
+ with (
+ mock.patch.object(AnalysisAPI, "ensure_illumina_run_on_disk"),
+ mock.patch.object(
+ SpringArchiveAPI,
+ "retrieve_spring_files_for_case",
+ ) as request_submitter,
+ ):
mip_analysis_api.ensure_files_are_present(case_id)
# THEN the files should have been requested
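
The parametrization above pins the complete Priority-to-TrailblazerPriority mapping, so the `MAP_TO_TRAILBLAZER_PRIORITY` constant imported from `cg.meta.workflow.utils.utils` plausibly reduces to a dict like the one below. This is a sketch inferred from the test cases, not the repository source; the enum definitions are assumptions standing in for the real `cg.constants` classes:

```python
from enum import IntEnum, StrEnum


class Priority(IntEnum):  # assumed stand-in for cg.constants.Priority
    research = 0
    standard = 1
    priority = 2
    express = 3
    clinical_trials = 4


class TrailblazerPriority(StrEnum):  # assumed stand-in for cg.constants.priority.TrailblazerPriority
    LOW = "low"
    NORMAL = "normal"
    HIGH = "high"
    EXPRESS = "express"


# Exactly the pairs the parametrized test asserts:
MAP_TO_TRAILBLAZER_PRIORITY: dict[Priority, TrailblazerPriority] = {
    Priority.research: TrailblazerPriority.LOW,
    Priority.standard: TrailblazerPriority.NORMAL,
    Priority.clinical_trials: TrailblazerPriority.NORMAL,
    Priority.priority: TrailblazerPriority.HIGH,
    Priority.express: TrailblazerPriority.EXPRESS,
}
```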
diff --git a/tests/meta/workflow/test_nallo.py b/tests/meta/workflow/test_nallo.py
new file mode 100644
index 0000000000..498dab090f
--- /dev/null
+++ b/tests/meta/workflow/test_nallo.py
@@ -0,0 +1,28 @@
+from cg.meta.workflow.nallo import NalloAnalysisAPI
+from cg.models.cg_config import CGConfig
+
+
+def test_get_sample_sheet_content(
+ nallo_context: CGConfig,
+ nallo_case_id: str,
+):
+ """Test Nallo nextflow sample sheet creation."""
+
+ # GIVEN Nallo analysis API
+ analysis_api: NalloAnalysisAPI = nallo_context.meta_apis["analysis_api"]
+
+ # WHEN getting the sample sheet content
+ result = analysis_api.get_sample_sheet_content(case_id=nallo_case_id)
+
+    # THEN the returned content should contain the expected patterns
+ patterns = [
+ "ADM1",
+ "m00000_000000_000000_s4.hifi_reads.bc2021.bam",
+ "nallo_case_two_samples",
+ ]
+
+ contains_pattern = any(
+ any(any(pattern in sub_element for pattern in patterns) for sub_element in element)
+ for element in result
+ )
+ assert contains_pattern
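
The triple-nested `any(...)` in the assertion above is a substring search over what is presumably a list-of-lists sample sheet: it succeeds as soon as any one pattern occurs in any one cell. A standalone illustration with made-up data:

```python
result = [
    ["ADM1", "m00000_000000_000000_s4.hifi_reads.bc2021.bam"],
    ["nallo_case_two_samples", "/path/to/reads.bam"],
]
patterns = ["ADM1", "not-present-anywhere"]

# True as soon as any pattern is a substring of any cell in any row.
contains_pattern = any(
    any(any(pattern in cell for pattern in patterns) for cell in row)
    for row in result
)
assert contains_pattern
```

Note that a single hit satisfies the check; if the intent were that every listed pattern must appear somewhere, the check would need an `all` over patterns wrapping an `any` over rows and cells.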
diff --git a/tests/meta/workflow/test_prepare_fastq_api.py b/tests/meta/workflow/test_prepare_fastq_api.py
index eea4eec8b3..f079022020 100644
--- a/tests/meta/workflow/test_prepare_fastq_api.py
+++ b/tests/meta/workflow/test_prepare_fastq_api.py
@@ -136,9 +136,14 @@ def test_fastq_should_be_added_to_housekeeper(
nr_of_files_before: int = len(list(version_object.files))
# GIVEN that the decompressed files exist
- with mock.patch.object(
- CompressionData, "file_exists_and_is_accessible", return_value=file_exists_and_is_accessible
- ), mock.patch.object(crunchy.files, "get_crunchy_metadata", returnvalue=[]):
+ with (
+ mock.patch.object(
+ CompressionData,
+ "file_exists_and_is_accessible",
+ return_value=file_exists_and_is_accessible,
+ ),
+        mock.patch.object(crunchy.files, "get_crunchy_metadata", return_value=[]),
+ ):
# WHEN adding decompressed fastq files
prepare_fastq_api.add_decompressed_fastq_files_to_housekeeper(case_id)
@@ -190,17 +195,21 @@ def test_add_decompressed_sample_loops_through_spring(
case = analysis_store.get_case_by_internal_id(case_id)
sample = case.samples[0]
- with mock.patch.object(
- files,
- "get_hk_files_dict",
- return_value={},
- ), mock.patch.object(
- files,
- "get_spring_paths",
- return_value=[CompressionData(spring_file.with_suffix(""))],
- ), mock.patch.object(
- CompressAPI, "add_decompressed_fastq", return_value=True
- ) as request_submitter:
+ with (
+ mock.patch.object(
+ files,
+ "get_hk_files_dict",
+ return_value={},
+ ),
+ mock.patch.object(
+ files,
+ "get_spring_paths",
+ return_value=[CompressionData(spring_file.with_suffix(""))],
+ ),
+ mock.patch.object(
+ CompressAPI, "add_decompressed_fastq", return_value=True
+ ) as request_submitter,
+ ):
# WHEN adding decompressed fastq files to Housekeeper
prepare_fastq_api.add_decompressed_sample(case=case, sample=sample)
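
One keyword in the patch stacks above deserves a close look: `mock.patch.object` forwards unknown keyword arguments to the replacement `MagicMock` as plain attributes, so a misspelling such as `returnvalue=` configures nothing and fails silently, while `return_value=` actually sets what the mock returns. A minimal demonstration of the difference:

```python
from unittest import mock


class Client:
    def get_items(self) -> list:
        return ["real"]


# Misspelled keyword: merely sets an attribute named "returnvalue" on the
# mock; the patched method still returns a fresh MagicMock, not [].
with mock.patch.object(Client, "get_items", returnvalue=[]):
    items = Client().get_items()
    assert isinstance(items, mock.MagicMock)

# Correct keyword: the patched method now returns the configured value.
with mock.patch.object(Client, "get_items", return_value=[]):
    assert Client().get_items() == []
```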
diff --git a/tests/models/delivery_report/test_models_validators.py b/tests/models/delivery_report/test_models_validators.py
index 16c620df7e..cc0f62816b 100644
--- a/tests/models/delivery_report/test_models_validators.py
+++ b/tests/models/delivery_report/test_models_validators.py
@@ -19,7 +19,8 @@
YES_FIELD,
Sex,
)
-from cg.constants.constants import AnalysisType, Workflow
+from cg.constants.constants import Workflow
+from cg.constants.tb import AnalysisType
from cg.meta.delivery_report.delivery_report_api import DeliveryReportAPI
from cg.meta.delivery_report.rnafusion import RnafusionDeliveryReportAPI
from cg.models.analysis import NextflowAnalysis
@@ -94,7 +95,7 @@ def test_get_number_as_string(input_value: Any, expected_output: str, caplog: Lo
# GIVEN a list of number inputs and their expected values
- if expected_output == ValueError:
+ if expected_output is ValueError:
# WHEN getting a string representation of a number
with pytest.raises(ValueError):
get_number_as_string(input_value)
@@ -221,7 +222,7 @@ def test_get_analysis_type_as_string():
"""Test analysis type formatting for the delivery report generation."""
# GIVEN a WHOLE_GENOME_SEQUENCING analysis type and a model info dictionary
- analysis_type: str = AnalysisType.WHOLE_GENOME_SEQUENCING
+ analysis_type: str = AnalysisType.WGS
model_info = ValidationInfo
model_info.data = {"workflow": Workflow.MIP_DNA.value}
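
The `==` to `is` change above tightens a sentinel comparison: the parametrized `expected_output` is either a string or the `ValueError` class itself, and identity is the unambiguous way to test for the class. A compact, hypothetical version of the pattern (`float` stands in for `get_number_as_string`):

```python
import pytest


@pytest.mark.parametrize(
    "input_value, expected_output",
    [("1.5", "1.5"), ("not-a-number", ValueError)],
)
def test_number_as_string(input_value: str, expected_output):
    # Identity check: is this case expected to raise, or to return a value?
    if expected_output is ValueError:
        with pytest.raises(ValueError):
            float(input_value)
    else:
        assert str(float(input_value)) == expected_output
```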
diff --git a/tests/services/__init__.py b/tests/services/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/services/fastq_file_service/conftest.py b/tests/services/fastq_file_service/conftest.py
index 4f5b20a92f..06860373a7 100644
--- a/tests/services/fastq_file_service/conftest.py
+++ b/tests/services/fastq_file_service/conftest.py
@@ -11,29 +11,67 @@ def fastq_file_service():
return FastqConcatenationService()
-def create_fastqs_directory(number_forward_reads, number_reverse_reads, tmp_path):
+def create_fastqs_directory(tmp_path: Path):
fastq_dir = Path(tmp_path, "fastqs")
fastq_dir.mkdir()
+ return fastq_dir
+
+
+def create_fastq_files(
+ fastq_dir: Path, number_forward_reads: int, number_reverse_reads: int, sample_id: str
+):
for i in range(number_forward_reads):
- file = Path(fastq_dir, f"sample_R1_{i}.fastq.gz")
- file.write_text(f"forward read {i}")
+ file = Path(fastq_dir, f"{sample_id}_R1_{i}.fastq.gz")
+ file.write_text(f"{sample_id} forward read {i}")
for i in range(number_reverse_reads):
- file = Path(fastq_dir, f"sample_R2_{i}.fastq.gz")
- file.write_text(f"reverse read {i}")
+ file = Path(fastq_dir, f"{sample_id}_R2_{i}.fastq.gz")
+ file.write_text(f"{sample_id} reverse read {i}")
+
+
+@pytest.fixture
+def fastqs_dir(tmp_path: Path, sample_id: str) -> Path:
+ fastq_dir: Path = create_fastqs_directory(tmp_path=tmp_path)
+ create_fastq_files(
+ fastq_dir=fastq_dir, number_forward_reads=3, number_reverse_reads=3, sample_id=sample_id
+ )
return fastq_dir
@pytest.fixture
-def fastqs_dir(tmp_path) -> Path:
- return create_fastqs_directory(
- number_forward_reads=3, number_reverse_reads=3, tmp_path=tmp_path
+def fastq_dir_existing_concatenated_files(tmp_path: Path, sample_id: str) -> Path:
+ fastq_dir: Path = create_fastqs_directory(tmp_path=tmp_path)
+ create_fastq_files(
+ fastq_dir=fastq_dir, number_forward_reads=3, number_reverse_reads=3, sample_id=sample_id
)
+ forward_output_path = Path(fastq_dir, "forward.fastq.gz")
+ reverse_output_path = Path(fastq_dir, "reverse.fastq.gz")
+ forward_output_path.write_text("Existing concatenated forward reads")
+ reverse_output_path.write_text("Existing concatenated reverse reads")
+ return fastq_dir
@pytest.fixture
-def fastqs_forward(tmp_path) -> Path:
+def fastqs_forward(tmp_path: Path, sample_id: str) -> Path:
"""Return a directory with only forward reads."""
- return create_fastqs_directory(
- number_forward_reads=3, number_reverse_reads=0, tmp_path=tmp_path
+ fastq_dir: Path = create_fastqs_directory(tmp_path=tmp_path)
+ create_fastq_files(
+ fastq_dir=fastq_dir, number_forward_reads=3, number_reverse_reads=0, sample_id=sample_id
)
+ return fastq_dir
+
+
+@pytest.fixture
+def fastqs_multiple_samples(tmp_path: Path, sample_id: str, another_sample_id: str) -> Path:
+ """Return a directory with fastq files for multiple samples."""
+ fastq_dir: Path = create_fastqs_directory(tmp_path=tmp_path)
+ create_fastq_files(
+ fastq_dir=fastq_dir, number_forward_reads=3, number_reverse_reads=3, sample_id=sample_id
+ )
+ create_fastq_files(
+ fastq_dir=fastq_dir,
+ number_forward_reads=3,
+ number_reverse_reads=3,
+ sample_id=another_sample_id,
+ )
+ return fastq_dir
diff --git a/tests/services/fastq_file_service/test_fastq_file_service.py b/tests/services/fastq_file_service/test_fastq_file_service.py
index a4dc9e25d1..546438c6d9 100644
--- a/tests/services/fastq_file_service/test_fastq_file_service.py
+++ b/tests/services/fastq_file_service/test_fastq_file_service.py
@@ -9,7 +9,9 @@
from cg.services.fastq_concatenation_service.utils import generate_concatenated_fastq_delivery_path
-def test_empty_directory(fastq_file_service: FastqConcatenationService, tmp_path):
+def test_empty_directory(
+ fastq_file_service: FastqConcatenationService, tmp_path: Path, sample_id: str
+):
# GIVEN an empty directory
# GIVEN output files
@@ -18,6 +20,7 @@ def test_empty_directory(fastq_file_service: FastqConcatenationService, tmp_path
# WHEN concatenating the reads
fastq_file_service.concatenate(
+ sample_id=sample_id,
fastq_directory=tmp_path,
forward_output_path=forward_output_path,
reverse_output_path=reverse_output_path,
@@ -28,7 +31,9 @@ def test_empty_directory(fastq_file_service: FastqConcatenationService, tmp_path
assert not reverse_output_path.exists()
-def test_concatenate(fastq_file_service: FastqConcatenationService, fastqs_dir: Path):
+def test_concatenate(
+ fastq_file_service: FastqConcatenationService, fastqs_dir: Path, sample_id: str
+):
# GIVEN a directory with forward and reverse reads
# GIVEN output files for the concatenated reads
@@ -37,6 +42,7 @@ def test_concatenate(fastq_file_service: FastqConcatenationService, fastqs_dir:
# WHEN concatenating the reads
fastq_file_service.concatenate(
+ sample_id=sample_id,
fastq_directory=fastqs_dir,
forward_output_path=forward_output_path,
reverse_output_path=reverse_output_path,
@@ -57,19 +63,25 @@ def test_concatenate(fastq_file_service: FastqConcatenationService, fastqs_dir:
def test_concatenate_when_output_exists(
- fastq_file_service: FastqConcatenationService, fastqs_dir: Path
+ fastq_file_service: FastqConcatenationService,
+ fastq_dir_existing_concatenated_files: Path,
+ sample_id: str,
):
+    """Test that existing concatenated output files are overwritten."""
# GIVEN a directory with forward and reverse reads
- existing_fastq_files = list(fastqs_dir.iterdir())
- existing_forward: Path = existing_fastq_files[0]
+ forward_output_path = Path(fastq_dir_existing_concatenated_files, "forward.fastq.gz")
+ reverse_output_path = Path(fastq_dir_existing_concatenated_files, "reverse.fastq.gz")
# GIVEN that the forward output file already exists
- forward_output_path = existing_forward
- reverse_output_path = Path(fastqs_dir, "reverse.fastq.gz")
+ assert forward_output_path.exists()
+ assert reverse_output_path.exists()
+ assert "Existing" in forward_output_path.read_text()
+ assert "Existing" in reverse_output_path.read_text()
# WHEN concatenating the reads
fastq_file_service.concatenate(
- fastq_directory=fastqs_dir,
+ sample_id=sample_id,
+ fastq_directory=fastq_dir_existing_concatenated_files,
forward_output_path=forward_output_path,
reverse_output_path=reverse_output_path,
remove_raw=True,
@@ -82,14 +94,16 @@ def test_concatenate_when_output_exists(
# THEN the concatenated forward reads only contain forward reads
assert "forward" in forward_output_path.read_text()
assert "reverse" not in forward_output_path.read_text()
+ assert "Existing" not in forward_output_path.read_text()
# THEN the concatenated reverse reads only contain reverse reads
assert "reverse" in reverse_output_path.read_text()
assert "forward" not in reverse_output_path.read_text()
+ assert "Existing" not in reverse_output_path.read_text()
def test_concatenate_missing_reverse(
- fastq_file_service: FastqConcatenationService, fastqs_forward: Path, tmp_path
+ fastq_file_service: FastqConcatenationService, fastqs_forward: Path, tmp_path, sample_id: str
):
# GIVEN a directory with forward reads only
@@ -99,6 +113,7 @@ def test_concatenate_missing_reverse(
# WHEN concatenating the reads
fastq_file_service.concatenate(
+ sample_id=sample_id,
fastq_directory=fastqs_forward,
forward_output_path=forward_output_path,
reverse_output_path=reverse_output_path,
@@ -111,6 +126,48 @@ def test_concatenate_missing_reverse(
assert not reverse_output_path.exists()
+def test_concatenate_fastqs_multiple_samples_in_dir(
+ fastqs_multiple_samples: Path,
+ fastq_file_service: FastqConcatenationService,
+ sample_id: str,
+ another_sample_id: str,
+ tmp_path: Path,
+):
+ # GIVEN a fastq directory with fastq files for multiple samples that should be concatenated
+ samples: list[str] = [sample_id, another_sample_id]
+
+ # GIVEN output files for the concatenated reads
+ for fastq_sample in samples:
+ forward_output_path = Path(tmp_path, f"{fastq_sample}_forward.fastq.gz")
+ reverse_output_path = Path(tmp_path, f"{fastq_sample}_reverse.fastq.gz")
+
+ # WHEN concatenating the reads
+ fastq_file_service.concatenate(
+ sample_id=fastq_sample,
+ fastq_directory=fastqs_multiple_samples,
+ forward_output_path=forward_output_path,
+ reverse_output_path=reverse_output_path,
+ remove_raw=True,
+ )
+
+ not_current_sample: str = another_sample_id if fastq_sample == sample_id else sample_id
+ # THEN the output files should exist
+ assert forward_output_path.exists()
+ assert reverse_output_path.exists()
+
+ # THEN the concatenated forward reads only contain forward reads
+ assert "forward" in forward_output_path.read_text()
+ assert "reverse" not in forward_output_path.read_text()
+ assert fastq_sample in forward_output_path.read_text()
+ assert not_current_sample not in forward_output_path.read_text()
+
+ # THEN the concatenated reverse reads only contain reverse reads
+ assert "reverse" in reverse_output_path.read_text()
+ assert "forward" not in reverse_output_path.read_text()
+ assert fastq_sample in reverse_output_path.read_text()
+ assert not_current_sample not in reverse_output_path.read_text()
+
+
@pytest.mark.parametrize(
"fastq_directory, sample_name, direction, expected_output_path",
[
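
For orientation, the `concatenate` calls above imply the following usage of the service once a `sample_id` is required; the paths and sample id here are illustrative, while the keyword names are taken verbatim from the tests:

```python
from pathlib import Path

from cg.services.fastq_concatenation_service.fastq_concatenation_service import (
    FastqConcatenationService,
)

service = FastqConcatenationService()
fastq_dir = Path("/tmp/fastqs")  # shared directory holding reads for several samples
sample_id = "ACC12345"  # illustrative sample id

service.concatenate(
    sample_id=sample_id,  # only this sample's fastq files are concatenated
    fastq_directory=fastq_dir,
    forward_output_path=Path(fastq_dir, f"{sample_id}_forward.fastq.gz"),
    reverse_output_path=Path(fastq_dir, f"{sample_id}_reverse.fastq.gz"),
    remove_raw=True,  # drop the per-lane files once concatenation succeeds
)
```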
diff --git a/tests/services/file_delivery/delivery_file_service/test_service.py b/tests/services/file_delivery/delivery_file_service/test_service.py
index 256869e81e..7315fa4514 100644
--- a/tests/services/file_delivery/delivery_file_service/test_service.py
+++ b/tests/services/file_delivery/delivery_file_service/test_service.py
@@ -1,6 +1,5 @@
from unittest import mock
from unittest.mock import Mock
-
from cg.services.deliver_files.deliver_files_service.deliver_files_service import (
DeliverFilesService,
)
@@ -13,7 +12,6 @@ def test_file_delivery_service_no_files(empty_delivery_files: DeliveryFiles):
file_delivery_service = DeliverFilesService(
delivery_file_manager_service=Mock(),
move_file_service=Mock(),
- file_filter=Mock(),
file_formatter_service=Mock(),
rsync_service=Mock(),
tb_service=Mock(),
diff --git a/tests/services/file_delivery/delivery_file_service/test_service_builder.py b/tests/services/file_delivery/delivery_file_service/test_service_builder.py
index 1a16496a0a..29fe6d17af 100644
--- a/tests/services/file_delivery/delivery_file_service/test_service_builder.py
+++ b/tests/services/file_delivery/delivery_file_service/test_service_builder.py
@@ -5,10 +5,11 @@
from pydantic import BaseModel
from cg.constants import DataDelivery, Workflow
+from cg.services.deliver_files.constants import DeliveryDestination, DeliveryStructure
from cg.services.deliver_files.deliver_files_service.deliver_files_service import (
DeliverFilesService,
)
-from cg.services.deliver_files.deliver_files_service.deliver_files_service_factory import (
+from cg.services.deliver_files.factory import (
DeliveryServiceFactory,
)
from cg.services.deliver_files.file_fetcher.abstract import FetchDeliveryFilesService
@@ -17,12 +18,29 @@
)
from cg.services.deliver_files.file_fetcher.analysis_service import AnalysisDeliveryFileFetcher
from cg.services.deliver_files.file_fetcher.raw_data_service import RawDataDeliveryFileFetcher
-from cg.services.deliver_files.file_formatter.utils.sample_concatenation_service import (
+from cg.services.deliver_files.file_formatter.files.mutant_service import (
+ MutantFileFormatter,
+)
+from cg.services.deliver_files.file_formatter.files.concatenation_service import (
SampleFileConcatenationFormatter,
)
-from cg.services.deliver_files.file_formatter.utils.sample_service import SampleFileFormatter
-from cg.services.deliver_files.file_mover.service import DeliveryFilesMover
+from cg.services.deliver_files.file_formatter.files.sample_service import (
+ SampleFileFormatter,
+)
+from cg.services.deliver_files.file_formatter.path_name.abstract import PathNameFormatter
+from cg.services.deliver_files.file_formatter.path_name.flat_structure import (
+ FlatStructurePathFormatter,
+)
+from cg.services.deliver_files.file_formatter.path_name.nested_structure import (
+ NestedStructurePathFormatter,
+)
+from cg.services.deliver_files.file_mover.abstract import DestinationFilesMover
+from cg.services.deliver_files.file_mover.base_service import BaseDestinationFilesMover
+from cg.services.deliver_files.file_mover.customer_inbox_service import (
+ CustomerInboxDestinationFilesMover,
+)
from cg.services.deliver_files.tag_fetcher.abstract import FetchDeliveryFileTagsService
+from cg.services.deliver_files.tag_fetcher.fohm_upload_service import FOHMUploadTagsFetcher
from cg.services.deliver_files.tag_fetcher.sample_and_case_service import (
SampleAndCaseDeliveryTagsFetcher,
)
@@ -36,9 +54,14 @@ class DeliveryServiceScenario(BaseModel):
delivery_type: DataDelivery
expected_tag_fetcher: type[FetchDeliveryFileTagsService]
expected_file_fetcher: type[FetchDeliveryFilesService]
- expected_file_mover: type[DeliveryFilesMover]
- expected_sample_file_formatter: type[SampleFileFormatter | SampleFileConcatenationFormatter]
+ expected_file_mover: type[DestinationFilesMover]
+ expected_sample_file_formatter: type[
+ SampleFileFormatter | SampleFileConcatenationFormatter | MutantFileFormatter
+ ]
+ expected_path_name_formatter: type[PathNameFormatter]
store_name: str
+ delivery_destination: DeliveryDestination
+ delivery_structure: DeliveryStructure
@pytest.mark.parametrize(
@@ -50,9 +73,12 @@ class DeliveryServiceScenario(BaseModel):
delivery_type=DataDelivery.FASTQ,
expected_tag_fetcher=SampleAndCaseDeliveryTagsFetcher,
expected_file_fetcher=RawDataDeliveryFileFetcher,
- expected_file_mover=DeliveryFilesMover,
+ expected_file_mover=CustomerInboxDestinationFilesMover,
expected_sample_file_formatter=SampleFileConcatenationFormatter,
+ expected_path_name_formatter=NestedStructurePathFormatter,
store_name="microbial_store",
+ delivery_destination=DeliveryDestination.CUSTOMER,
+ delivery_structure=DeliveryStructure.NESTED,
),
DeliveryServiceScenario(
app_tag="VWGDPTR001",
@@ -60,9 +86,12 @@ class DeliveryServiceScenario(BaseModel):
delivery_type=DataDelivery.ANALYSIS_FILES,
expected_tag_fetcher=SampleAndCaseDeliveryTagsFetcher,
expected_file_fetcher=AnalysisDeliveryFileFetcher,
- expected_file_mover=DeliveryFilesMover,
- expected_sample_file_formatter=SampleFileFormatter,
+ expected_file_mover=CustomerInboxDestinationFilesMover,
+ expected_sample_file_formatter=MutantFileFormatter,
+ expected_path_name_formatter=NestedStructurePathFormatter,
store_name="mutant_store",
+ delivery_destination=DeliveryDestination.CUSTOMER,
+ delivery_structure=DeliveryStructure.NESTED,
),
DeliveryServiceScenario(
app_tag="PANKTTR020",
@@ -70,16 +99,46 @@ class DeliveryServiceScenario(BaseModel):
delivery_type=DataDelivery.FASTQ_ANALYSIS,
expected_tag_fetcher=SampleAndCaseDeliveryTagsFetcher,
expected_file_fetcher=RawDataAndAnalysisDeliveryFileFetcher,
- expected_file_mover=DeliveryFilesMover,
+ expected_file_mover=CustomerInboxDestinationFilesMover,
expected_sample_file_formatter=SampleFileFormatter,
+ expected_path_name_formatter=NestedStructurePathFormatter,
store_name="applications_store",
+ delivery_destination=DeliveryDestination.CUSTOMER,
+ delivery_structure=DeliveryStructure.NESTED,
+ ),
+ DeliveryServiceScenario(
+ app_tag="VWGDPTR001",
+ data_analysis=Workflow.MUTANT,
+ delivery_type=DataDelivery.ANALYSIS_FILES,
+ expected_tag_fetcher=FOHMUploadTagsFetcher,
+ expected_file_fetcher=AnalysisDeliveryFileFetcher,
+ expected_file_mover=BaseDestinationFilesMover,
+ expected_sample_file_formatter=MutantFileFormatter,
+ expected_path_name_formatter=FlatStructurePathFormatter,
+ store_name="mutant_store",
+ delivery_destination=DeliveryDestination.FOHM,
+ delivery_structure=DeliveryStructure.FLAT,
+ ),
+ DeliveryServiceScenario(
+ app_tag="VWGDPTR001",
+ data_analysis=Workflow.MUTANT,
+ delivery_type=DataDelivery.ANALYSIS_FILES,
+ expected_tag_fetcher=SampleAndCaseDeliveryTagsFetcher,
+ expected_file_fetcher=AnalysisDeliveryFileFetcher,
+ expected_file_mover=BaseDestinationFilesMover,
+ expected_sample_file_formatter=MutantFileFormatter,
+ expected_path_name_formatter=FlatStructurePathFormatter,
+ store_name="mutant_store",
+ delivery_destination=DeliveryDestination.BASE,
+ delivery_structure=DeliveryStructure.FLAT,
),
],
- ids=["microbial-fastq", "SARS-COV2", "Targeted"],
+ ids=["microbial-fastq", "SARS-COV2", "Targeted", "FOHM Upload", "base"],
)
def test_build_delivery_service(scenario: DeliveryServiceScenario, request: FixtureRequest):
# GIVEN a delivery service builder with mocked store and hk_api
builder = DeliveryServiceFactory(
+ lims_api=MagicMock(),
store=request.getfixturevalue(scenario.store_name),
hk_api=MagicMock(),
rsync_service=MagicMock(),
@@ -96,7 +155,11 @@ def test_build_delivery_service(scenario: DeliveryServiceScenario, request: Fixt
]
# WHEN building a delivery service
- delivery_service: DeliverFilesService = builder.build_delivery_service(case=case_mock)
+ delivery_service: DeliverFilesService = builder.build_delivery_service(
+ case=case_mock,
+ delivery_destination=scenario.delivery_destination,
+ delivery_structure=scenario.delivery_structure,
+ )
# THEN the correct file formatter and file fetcher services are used
assert isinstance(delivery_service.file_manager.tags_fetcher, scenario.expected_tag_fetcher)
@@ -106,3 +169,12 @@ def test_build_delivery_service(scenario: DeliveryServiceScenario, request: Fixt
delivery_service.file_formatter.sample_file_formatter,
scenario.expected_sample_file_formatter,
)
+ if not isinstance(delivery_service.file_formatter.sample_file_formatter, MutantFileFormatter):
+ assert isinstance(
+ delivery_service.file_formatter.sample_file_formatter.path_name_formatter,
+ scenario.expected_path_name_formatter,
+ )
+ assert isinstance(
+ delivery_service.file_formatter.case_file_formatter.path_name_formatter,
+ scenario.expected_path_name_formatter,
+ )
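
Taken together, the five scenarios pin the factory's dispatch on two axes: `delivery_destination` selects the file mover and `delivery_structure` selects the path name formatter. The sketch below condenses that dispatch as the assertions imply it; it is inferred from the test, not lifted from `cg.services.deliver_files.factory`, which also wires tag fetchers, file fetchers and formatters:

```python
from cg.services.deliver_files.constants import DeliveryDestination, DeliveryStructure
from cg.services.deliver_files.file_formatter.path_name.flat_structure import (
    FlatStructurePathFormatter,
)
from cg.services.deliver_files.file_formatter.path_name.nested_structure import (
    NestedStructurePathFormatter,
)
from cg.services.deliver_files.file_mover.base_service import BaseDestinationFilesMover
from cg.services.deliver_files.file_mover.customer_inbox_service import (
    CustomerInboxDestinationFilesMover,
)


def pick_file_mover(destination: DeliveryDestination) -> type:
    # CUSTOMER deliveries use the inbox mover; FOHM and BASE share the base mover.
    if destination == DeliveryDestination.CUSTOMER:
        return CustomerInboxDestinationFilesMover
    return BaseDestinationFilesMover


def pick_path_name_formatter(structure: DeliveryStructure) -> type:
    # NESTED and FLAT map one-to-one onto the two formatter classes in the scenarios.
    if structure == DeliveryStructure.NESTED:
        return NestedStructurePathFormatter
    return FlatStructurePathFormatter
```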
diff --git a/tests/services/file_delivery/file_fetcher/test_file_fetching_service.py b/tests/services/file_delivery/file_fetcher/test_file_fetching_service.py
index 50b770bcfc..2974d6aa66 100644
--- a/tests/services/file_delivery/file_fetcher/test_file_fetching_service.py
+++ b/tests/services/file_delivery/file_fetcher/test_file_fetching_service.py
@@ -8,16 +8,19 @@
@pytest.mark.parametrize(
- "expected_delivery_files,delivery_file_service",
+ "expected_delivery_files,delivery_file_service,sample_id_to_fetch",
[
- ("expected_fastq_delivery_files", "raw_data_delivery_service"),
- ("expected_analysis_delivery_files", "analysis_delivery_service"),
- ("expected_bam_delivery_files", "bam_data_delivery_service"),
+ ("expected_fohm_delivery_files", "fohm_data_delivery_service", "empty_sample"),
+ ("expected_fastq_delivery_files", "raw_data_delivery_service", "empty_sample"),
+ ("expected_analysis_delivery_files", "analysis_delivery_service", "empty_sample"),
+ ("expected_bam_delivery_files", "bam_data_delivery_service", "empty_sample"),
+ ("expected_bam_delivery_files_single_sample", "bam_data_delivery_service", "sample_id"),
],
)
def test_get_files_to_deliver(
expected_delivery_files: DeliveryFiles,
delivery_file_service: FetchDeliveryFilesService,
+ sample_id_to_fetch: str | None,
case_id: str,
request,
):
@@ -25,9 +28,12 @@ def test_get_files_to_deliver(
# GIVEN a case id, samples that are present in Housekeeper and a delivery service
delivery_file_service = request.getfixturevalue(delivery_file_service)
expected_delivery_files = request.getfixturevalue(expected_delivery_files)
+ sample_id: str | None = request.getfixturevalue(sample_id_to_fetch)
# WHEN getting the files to deliver
- delivery_files: DeliveryFiles = delivery_file_service.get_files_to_deliver(case_id)
+ delivery_files: DeliveryFiles = delivery_file_service.get_files_to_deliver(
+ case_id=case_id, sample_id=sample_id
+ )
# THEN assert that the files to deliver are fetched
assert delivery_files == expected_delivery_files
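
The string parameters here are fixture *names*, resolved at run time with `request.getfixturevalue`; that keeps one test body reusable across several prebuilt fixture graphs. A minimal self-contained example of the idiom:

```python
import pytest


@pytest.fixture
def small_payload() -> dict:
    return {"samples": 1}


@pytest.fixture
def large_payload() -> dict:
    return {"samples": 96}


@pytest.mark.parametrize("payload_fixture", ["small_payload", "large_payload"])
def test_payload_has_samples(payload_fixture: str, request: pytest.FixtureRequest):
    # Resolve the fixture named by the current parameter value.
    payload: dict = request.getfixturevalue(payload_fixture)
    assert payload["samples"] > 0
```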
diff --git a/tests/services/file_delivery/file_filter/test_sample_filter_service.py b/tests/services/file_delivery/file_filter/test_sample_filter_service.py
deleted file mode 100644
index 200f43c0ad..0000000000
--- a/tests/services/file_delivery/file_filter/test_sample_filter_service.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from cg.services.deliver_files.file_fetcher.models import DeliveryFiles
-from cg.services.deliver_files.file_filter.sample_service import SampleFileFilter
-
-
-def test_filter_delivery_files(expected_fastq_delivery_files: DeliveryFiles, sample_id: str):
- """Test to filter delivery files."""
-
- # GIVEN a delivery files object with multiple sample ids and a filter delivery files service
- filter_service = SampleFileFilter()
- samples_ids: list[str] = [
- sample.sample_id for sample in expected_fastq_delivery_files.sample_files
- ]
- assert len(set(samples_ids)) > 1
-
- # WHEN filtering the delivery files
- filtered_delivery_files = filter_service.filter_delivery_files(
- expected_fastq_delivery_files, sample_id
- )
-
- # THEN assert that the delivery files only contains the sample with the given sample id
- for sample_file in filtered_delivery_files.sample_files:
- assert sample_file.sample_id == sample_id
diff --git a/tests/services/file_delivery/file_formatter/__init__.py b/tests/services/file_delivery/file_formatter/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/services/file_delivery/file_formatter/destination/__init__.py b/tests/services/file_delivery/file_formatter/destination/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/services/file_delivery/file_formatter/test_formatting_service.py b/tests/services/file_delivery/file_formatter/destination/test_formatting_service.py
similarity index 78%
rename from tests/services/file_delivery/file_formatter/test_formatting_service.py
rename to tests/services/file_delivery/file_formatter/destination/test_formatting_service.py
index d7a012842a..5771810ce9 100644
--- a/tests/services/file_delivery/file_formatter/test_formatting_service.py
+++ b/tests/services/file_delivery/file_formatter/destination/test_formatting_service.py
@@ -1,21 +1,20 @@
from pathlib import Path
import mock
+import pytest
from cg.services.deliver_files.file_fetcher.models import (
- DeliveryFiles,
- SampleFile,
CaseFile,
+ DeliveryFiles,
DeliveryMetaData,
+ SampleFile,
)
-from cg.services.deliver_files.file_formatter.abstract import (
- DeliveryFileFormattingService,
+from cg.services.deliver_files.file_formatter.destination.abstract import (
+ DeliveryDestinationFormatter,
)
-import pytest
-
-from cg.services.deliver_files.file_formatter.models import (
- FormattedFiles,
+from cg.services.deliver_files.file_formatter.destination.models import (
FormattedFile,
+ FormattedFiles,
)
@@ -39,7 +38,7 @@
],
)
def test_reformat_files(
- formatter_service: DeliveryFileFormattingService,
+ formatter_service: DeliveryDestinationFormatter,
formatted_case_files: list[FormattedFile],
formatted_sample_files: list[FormattedFile],
case_files: list[CaseFile],
@@ -69,12 +68,15 @@ def test_reformat_files(
files.extend(formatted_case_files)
expected_formatted_files = FormattedFiles(files=files)
- with mock.patch(
- "cg.services.deliver_files.file_formatter.utils.sample_service.SampleFileFormatter.format_files",
- return_value=formatted_sample_files,
- ), mock.patch(
- "cg.services.deliver_files.file_formatter.utils.case_service.CaseFileFormatter.format_files",
- return_value=formatted_case_files,
+ with (
+ mock.patch(
+ "cg.services.deliver_files.file_formatter.files.sample_service.SampleFileFormatter.format_files",
+ return_value=formatted_sample_files,
+ ),
+ mock.patch(
+ "cg.services.deliver_files.file_formatter.files.case_service.CaseFileFormatter.format_files",
+ return_value=formatted_case_files,
+ ),
):
# WHEN reformatting the delivery files
formatted_files: FormattedFiles = formatter_service.format_files(mock_delivery_files)
diff --git a/tests/services/file_delivery/file_formatter/files/__init__.py b/tests/services/file_delivery/file_formatter/files/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/services/file_delivery/file_formatter/files/test_formatter_utils.py b/tests/services/file_delivery/file_formatter/files/test_formatter_utils.py
new file mode 100644
index 0000000000..4d7b241b57
--- /dev/null
+++ b/tests/services/file_delivery/file_formatter/files/test_formatter_utils.py
@@ -0,0 +1,181 @@
+import os
+from unittest.mock import Mock
+import pytest
+from pathlib import Path
+
+from cg.services.deliver_files.file_formatter.files.abstract import FileFormatter
+from cg.services.deliver_files.file_formatter.files.mutant_service import (
+ MutantFileFormatter,
+)
+from cg.services.fastq_concatenation_service.fastq_concatenation_service import (
+ FastqConcatenationService,
+)
+from cg.services.deliver_files.file_fetcher.models import (
+ CaseFile,
+ SampleFile,
+)
+from cg.services.deliver_files.file_formatter.destination.models import FormattedFile
+from cg.services.deliver_files.file_formatter.files.case_service import (
+ CaseFileFormatter,
+)
+from cg.services.deliver_files.file_formatter.files.concatenation_service import (
+ SampleFileConcatenationFormatter,
+)
+from cg.services.deliver_files.file_formatter.files.sample_service import (
+ SampleFileFormatter,
+ FileManager,
+)
+from cg.services.deliver_files.file_formatter.path_name.flat_structure import (
+ FlatStructurePathFormatter,
+)
+from cg.services.deliver_files.file_formatter.path_name.nested_structure import (
+ NestedStructurePathFormatter,
+)
+
+
+@pytest.mark.parametrize(
+ "moved_files,expected_formatted_files,file_formatter",
+ [
+ (
+ "expected_moved_analysis_case_delivery_files",
+ "expected_formatted_analysis_case_files",
+ CaseFileFormatter(
+ file_manager=FileManager(), path_name_formatter=NestedStructurePathFormatter()
+ ),
+ ),
+ (
+ "expected_moved_analysis_sample_delivery_files",
+ "expected_formatted_analysis_sample_files",
+ SampleFileFormatter(
+ file_manager=FileManager(), path_name_formatter=NestedStructurePathFormatter()
+ ),
+ ),
+ (
+ "fastq_concatenation_sample_files",
+ "expected_concatenated_fastq_formatted_files",
+ SampleFileConcatenationFormatter(
+ file_manager=FileManager(),
+ path_name_formatter=NestedStructurePathFormatter(),
+ concatenation_service=FastqConcatenationService(),
+ ),
+ ),
+ (
+ "fastq_concatenation_sample_files_flat",
+ "expected_concatenated_fastq_flat_formatted_files",
+ SampleFileConcatenationFormatter(
+ file_manager=FileManager(),
+ path_name_formatter=FlatStructurePathFormatter(),
+ concatenation_service=FastqConcatenationService(),
+ ),
+ ),
+ ],
+)
+def test_file_formatters(
+ moved_files: list[CaseFile | SampleFile],
+ expected_formatted_files: list[FormattedFile],
+ file_formatter: FileFormatter,
+ request,
+):
+    # GIVEN existing case or sample files, a matching file formatter and a delivery directory path
+ moved_files: list[CaseFile | SampleFile] = request.getfixturevalue(moved_files)
+ expected_formatted_files: list[FormattedFile] = request.getfixturevalue(
+ expected_formatted_files
+ )
+ delivery_path: Path = moved_files[0].file_path.parent
+
+ os.makedirs(delivery_path, exist_ok=True)
+
+ for moved_file in moved_files:
+ moved_file.file_path.touch()
+
+ # WHEN formatting the case files
+ formatted_files: list[FormattedFile] = file_formatter.format_files(
+ moved_files=moved_files,
+ delivery_path=delivery_path,
+ )
+
+ # THEN the case files should be formatted
+ assert formatted_files == expected_formatted_files
+ for file in formatted_files:
+ assert file.formatted_path.exists()
+ assert not file.original_path.exists()
+
+
+def test_mutant_file_formatter(
+ mutant_moved_files: list[SampleFile],
+ expected_mutant_formatted_files: list[FormattedFile],
+ lims_naming_metadata: str,
+):
+    # GIVEN an existing ticket directory path and moved mutant sample files
+ ticket_dir_path: Path = mutant_moved_files[0].file_path.parent
+
+ os.makedirs(ticket_dir_path, exist_ok=True)
+
+ for moved_file in mutant_moved_files:
+ moved_file.file_path.touch()
+
+ lims_mock = Mock()
+ lims_mock.get_sample_region_and_lab_code.return_value = lims_naming_metadata
+ file_formatter = MutantFileFormatter(
+ file_manager=FileManager(),
+ file_formatter=SampleFileConcatenationFormatter(
+ file_manager=FileManager(),
+ path_name_formatter=NestedStructurePathFormatter(),
+ concatenation_service=FastqConcatenationService(),
+ ),
+ lims_api=lims_mock,
+ )
+
+ # WHEN formatting the files
+ formatted_files: list[FormattedFile] = file_formatter.format_files(
+ moved_files=mutant_moved_files,
+ delivery_path=ticket_dir_path,
+ )
+
+ # THEN the files should be formatted
+ assert formatted_files == expected_mutant_formatted_files
+ for file in formatted_files:
+ assert file.formatted_path.exists()
+ assert not file.original_path.exists()
+
+
+def test_concatenation_sample_name_match():
+    # GIVEN a concatenation formatter and a sample name that is purely numeric
+    sample_name = "12"
+    concatenation_formatter = SampleFileConcatenationFormatter(
+ file_manager=Mock(),
+ path_name_formatter=Mock(),
+ concatenation_service=Mock(),
+ )
+ # GIVEN two sets of file paths that should match and not match the sample name
+ should_match_file_paths = [
+ Path("path/to/FC_12_L001_R1_001.fastq.gz"),
+ Path("path/to/FC_12_L002_R1_001.fastq.gz"),
+ Path("path/to/FC_12_L001_R2_001.fastq.gz"),
+ Path("path/to/FC_12_L002_R2_001.fastq.gz"),
+ ]
+ should_not_match_file_paths = [
+ Path("path/to/FC_123_L001_R1_001.fastq.gz"),
+ Path("path/to/FC_123_L002_R1_001.fastq.gz"),
+ Path("path/to/FC_123_L001_R2_001.fastq.gz"),
+ Path("path/to/FC_123_L002_R2_001.fastq.gz"),
+ ]
+
+ # WHEN checking if the file paths match the sample name
+
+    # THEN paths with the exact sample name should match and paths where it is part of a longer name should not
+ for file_path in should_match_file_paths:
+ assert (
+            concatenation_formatter._has_expected_sample_name_format_match(
+ file_path=file_path, sample_name=sample_name
+ )
+ is True
+ )
+
+ for file_path in should_not_match_file_paths:
+ assert (
+            concatenation_formatter._has_expected_sample_name_format_match(
+ file_path=file_path, sample_name=sample_name
+ )
+ is False
+ )
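
The numeric-sample-name test above documents the tricky case: `12` must match `FC_12_L001_R1_001.fastq.gz` but not `FC_123_L001_R1_001.fastq.gz`, so a plain substring test is not enough. A hypothetical, delimiter-aware stand-in for `_has_expected_sample_name_format_match` (the real implementation may differ):

```python
import re
from pathlib import Path


def has_sample_name_match(file_path: Path, sample_name: str) -> bool:
    # Require the sample name to sit between delimiters (start of name, "_" or "-"),
    # so "12" matches "FC_12_L001_R1_001" but not "FC_123_L001_R1_001".
    pattern = rf"(^|[_-]){re.escape(sample_name)}([_-]|$)"
    return re.search(pattern, file_path.name) is not None


assert has_sample_name_match(Path("path/to/FC_12_L001_R1_001.fastq.gz"), "12")
assert not has_sample_name_match(Path("path/to/FC_123_L001_R1_001.fastq.gz"), "12")
```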
diff --git a/tests/services/file_delivery/file_formatter/path_name_formatters/__init__.py b/tests/services/file_delivery/file_formatter/path_name_formatters/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/services/file_delivery/file_formatter/path_name_formatters/test_path_name_formatters.py b/tests/services/file_delivery/file_formatter/path_name_formatters/test_path_name_formatters.py
new file mode 100644
index 0000000000..c43e3aa9f7
--- /dev/null
+++ b/tests/services/file_delivery/file_formatter/path_name_formatters/test_path_name_formatters.py
@@ -0,0 +1,54 @@
+import pytest
+
+from cg.services.deliver_files.file_fetcher.models import SampleFile
+from cg.services.deliver_files.file_formatter.destination.models import FormattedFile
+from cg.services.deliver_files.file_formatter.path_name.flat_structure import (
+ FlatStructurePathFormatter,
+)
+from cg.services.deliver_files.file_formatter.path_name.nested_structure import (
+ NestedStructurePathFormatter,
+)
+
+
+@pytest.mark.parametrize(
+ "sample_files,expected_formatted_files,path_name_formatter",
+ [
+ (
+ "expected_moved_analysis_sample_delivery_files",
+ "expected_formatted_analysis_sample_files",
+ NestedStructurePathFormatter(),
+ ),
+ (
+ "expected_moved_analysis_sample_delivery_files",
+ "expected_flat_formatted_analysis_sample_files",
+ FlatStructurePathFormatter(),
+ ),
+ ],
+)
+def test_path_name_formatters(
+ sample_files: list[SampleFile],
+ expected_formatted_files: list[FormattedFile],
+ path_name_formatter,
+ request,
+):
+    # GIVEN existing sample files and a path name formatter
+ sample_files: list[SampleFile] = request.getfixturevalue(sample_files)
+ expected_formatted_files: list[FormattedFile] = request.getfixturevalue(
+ expected_formatted_files
+ )
+
+ # WHEN formatting the sample files
+ formatted_files: list[FormattedFile] = [
+ FormattedFile(
+ formatted_path=path_name_formatter.format_file_path(
+ file_path=sample_file.file_path,
+ provided_name=sample_file.sample_name,
+ provided_id=sample_file.sample_id,
+ ),
+ original_path=sample_file.file_path,
+ )
+ for sample_file in sample_files
+ ]
+
+ # THEN the sample files should be formatted
+ assert formatted_files == expected_formatted_files
diff --git a/tests/services/file_delivery/file_formatter/utils/test_formatter_utils.py b/tests/services/file_delivery/file_formatter/utils/test_formatter_utils.py
deleted file mode 100644
index 2245fb7f78..0000000000
--- a/tests/services/file_delivery/file_formatter/utils/test_formatter_utils.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import os
-import pytest
-from pathlib import Path
-
-from cg.services.fastq_concatenation_service.fastq_concatenation_service import (
- FastqConcatenationService,
-)
-from cg.services.deliver_files.file_fetcher.models import (
- CaseFile,
- SampleFile,
-)
-from cg.services.deliver_files.file_formatter.models import FormattedFile
-from cg.services.deliver_files.file_formatter.utils.case_service import (
- CaseFileFormatter,
-)
-from cg.services.deliver_files.file_formatter.utils.sample_concatenation_service import (
- SampleFileConcatenationFormatter,
-)
-from cg.services.deliver_files.file_formatter.utils.sample_service import (
- SampleFileFormatter,
- FileManagingService,
- SampleFileNameFormatter,
-)
-
-
-@pytest.mark.parametrize(
- "moved_files,expected_formatted_files,file_formatter",
- [
- (
- "expected_moved_analysis_case_delivery_files",
- "expected_formatted_analysis_case_files",
- CaseFileFormatter(),
- ),
- (
- "expected_moved_analysis_sample_delivery_files",
- "expected_formatted_analysis_sample_files",
- SampleFileFormatter(
- file_manager=FileManagingService(), file_name_formatter=SampleFileNameFormatter()
- ),
- ),
- (
- "fastq_concatenation_sample_files",
- "expected_concatenated_fastq_formatted_files",
- SampleFileConcatenationFormatter(
- file_manager=FileManagingService(),
- file_formatter=SampleFileNameFormatter(),
- concatenation_service=FastqConcatenationService(),
- ),
- ),
- ],
-)
-def test_file_formatter_utils(
- moved_files: list[CaseFile | SampleFile],
- expected_formatted_files: list[FormattedFile],
- file_formatter: CaseFileFormatter | SampleFileFormatter | SampleFileConcatenationFormatter,
- request,
-):
- # GIVEN existing case files, a case file formatter and a ticket directory path and a customer inbox
- moved_files: list[CaseFile | SampleFile] = request.getfixturevalue(moved_files)
- expected_formatted_files: list[FormattedFile] = request.getfixturevalue(
- expected_formatted_files
- )
- ticket_dir_path: Path = moved_files[0].file_path.parent
-
- os.makedirs(ticket_dir_path, exist_ok=True)
-
- for moved_file in moved_files:
- moved_file.file_path.touch()
-
- # WHEN formatting the case files
- formatted_files: list[FormattedFile] = file_formatter.format_files(
- moved_files=moved_files,
- ticket_dir_path=ticket_dir_path,
- )
-
- # THEN the case files should be formatted
- assert formatted_files == expected_formatted_files
- for file in formatted_files:
- assert file.formatted_path.exists()
- assert not file.original_path.exists()
diff --git a/tests/services/file_delivery/file_mover/test_file_mover_service.py b/tests/services/file_delivery/file_mover/test_file_mover_service.py
index 068a771835..4b8ab40c93 100644
--- a/tests/services/file_delivery/file_mover/test_file_mover_service.py
+++ b/tests/services/file_delivery/file_mover/test_file_mover_service.py
@@ -3,21 +3,37 @@
import pytest
from cg.services.deliver_files.file_fetcher.models import DeliveryFiles
-from cg.services.deliver_files.file_mover.service import (
- DeliveryFilesMover,
+from cg.services.deliver_files.file_mover.customer_inbox_service import (
+ CustomerInboxDestinationFilesMover,
)
+from cg.services.deliver_files.file_mover.base_service import BaseDestinationFilesMover
+from cg.services.deliver_files.utils import FileMover, FileManager
@pytest.mark.parametrize(
- "expected_moved_delivery_files,delivery_files",
+ "expected_moved_delivery_files,delivery_files,move_files_service",
[
- ("expected_moved_fastq_delivery_files", "expected_fastq_delivery_files"),
- ("expected_moved_analysis_delivery_files", "expected_analysis_delivery_files"),
+ (
+ "expected_moved_fastq_delivery_files",
+ "expected_fastq_delivery_files",
+ CustomerInboxDestinationFilesMover(FileMover(FileManager())),
+ ),
+ (
+ "expected_moved_analysis_delivery_files",
+ "expected_analysis_delivery_files",
+ CustomerInboxDestinationFilesMover(FileMover(FileManager())),
+ ),
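+ # BaseDestinationFilesMover presumably moves files directly to the delivery base path, with no customer inbox subdirectory.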
+ (
+ "expected_moved_upload_files",
+ "expected_upload_files",
+ BaseDestinationFilesMover(FileMover(FileManager())),
+ ),
],
)
def test_move_files(
expected_moved_delivery_files: DeliveryFiles,
delivery_files: DeliveryFiles,
+ move_files_service: CustomerInboxDestinationFilesMover | BaseDestinationFilesMover,
tmp_path,
request,
):
@@ -28,7 +44,6 @@ def test_move_files(
delivery_files: DeliveryFiles = request.getfixturevalue(delivery_files)
# WHEN moving the delivery files
- move_files_service = DeliveryFilesMover()
moved_delivery_files: DeliveryFiles = move_files_service.move_files(
delivery_files=delivery_files, delivery_base_path=tmp_path
)
diff --git a/tests/services/file_delivery/tag_fetcher/test_tag_service.py b/tests/services/file_delivery/tag_fetcher/test_tag_service.py
index 6e54fdc73f..e1b541b15f 100644
--- a/tests/services/file_delivery/tag_fetcher/test_tag_service.py
+++ b/tests/services/file_delivery/tag_fetcher/test_tag_service.py
@@ -10,6 +10,7 @@
from cg.services.deliver_files.tag_fetcher.exc import (
FetchDeliveryFileTagsError,
)
+from cg.services.deliver_files.tag_fetcher.fohm_upload_service import FOHMUploadTagsFetcher
from cg.services.deliver_files.tag_fetcher.models import DeliveryFileTags
from cg.services.deliver_files.tag_fetcher.sample_and_case_service import (
SampleAndCaseDeliveryTagsFetcher,
@@ -64,3 +65,15 @@ def test_bam_delivery_tags_fetcher():
# THEN assert that the tags are fetched
assert tags.case_tags is None
assert tags.sample_tags == [{"bam"}]
+
+
+def test_fohm_upload_tags_fetcher():
+ # GIVEN a tag fetcher
+ test_fetcher = FOHMUploadTagsFetcher()
+
+ # WHEN fetching the tags for the files to deliver
+ tags: DeliveryFileTags = test_fetcher.fetch_tags(Workflow.MUTANT)
+
+ # THEN assert that the tags are fetched
+ assert tags.case_tags is None
+ assert tags.sample_tags == [{"consensus-sample"}, {"vcf-report"}]
diff --git a/tests/services/illumina/cleaning/test_clean_runs_service.py b/tests/services/illumina/cleaning/test_clean_runs_service.py
index e1aaa77b18..cc7c3c9827 100644
--- a/tests/services/illumina/cleaning/test_clean_runs_service.py
+++ b/tests/services/illumina/cleaning/test_clean_runs_service.py
@@ -11,16 +11,10 @@
from cg.apps.housekeeper.hk import HousekeeperAPI
from cg.constants import SequencingFileTag
from cg.constants.time import TWENTY_ONE_DAYS_IN_SECONDS
-from cg.exc import IlluminaCleanRunError, HousekeeperFileMissingError
-from cg.services.illumina.cleaning.clean_runs_service import (
- IlluminaCleanRunsService,
-)
+from cg.exc import HousekeeperFileMissingError, IlluminaCleanRunError
+from cg.services.illumina.cleaning.clean_runs_service import IlluminaCleanRunsService
from cg.store.exc import EntryNotFoundError
-from cg.store.models import (
- Sample,
- IlluminaSequencingRun,
- IlluminaSampleSequencingMetrics,
-)
+from cg.store.models import IlluminaSampleSequencingMetrics, IlluminaSequencingRun, Sample
from tests.store_helpers import StoreHelpers
@@ -299,15 +293,19 @@ def test_can_sequencing_run_be_deleted_no_spring_no_fastq(
):
"""Test that a sequencing run can not be deleted when it has no spring files and no fastq files."""
# GIVEN a sequencing run that can be deleted
- with mock.patch(
- "cg.services.illumina.cleaning.clean_runs_service.IlluminaCleanRunsService.is_directory_older_than_21_days",
- return_value=True,
- ), mock.patch(
- "cg.services.illumina.cleaning.clean_runs_service.IlluminaCleanRunsService.has_fastq_files_for_samples_in_housekeeper",
- return_value=False,
- ), mock.patch(
- "cg.services.illumina.cleaning.clean_runs_service.IlluminaCleanRunsService.has_spring_meta_data_files_for_samples_in_housekeeper",
- return_value=False,
+ with (
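+ # Parenthesized multi-context managers are Python 3.10+ syntax.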
+ mock.patch(
+ "cg.services.illumina.cleaning.clean_runs_service.IlluminaCleanRunsService.is_directory_older_than_21_days",
+ return_value=True,
+ ),
+ mock.patch(
+ "cg.services.illumina.cleaning.clean_runs_service.IlluminaCleanRunsService.has_fastq_files_for_samples_in_housekeeper",
+ return_value=False,
+ ),
+ mock.patch(
+ "cg.services.illumina.cleaning.clean_runs_service.IlluminaCleanRunsService.has_spring_meta_data_files_for_samples_in_housekeeper",
+ return_value=False,
+ ),
):
# WHEN checking that the sequencing run can be deleted
diff --git a/tests/services/illumina/post_processing/test_housekeeper_storage.py b/tests/services/illumina/post_processing/test_housekeeper_storage.py
index ceea5bc2dc..df902c0f26 100644
--- a/tests/services/illumina/post_processing/test_housekeeper_storage.py
+++ b/tests/services/illumina/post_processing/test_housekeeper_storage.py
@@ -36,16 +36,19 @@ def test_add_fastq_files_to_housekeeper(
assert hk_api.get_files(tags=[SequencingFileTag.FASTQ], bundle=sample_id).count() == 0
# GIVEN a demultiplexed run dir data with a sample sheet and samples
- with patch.object(
- target=IlluminaRunDirectoryData,
- attribute="sample_sheet",
- new_callable=PropertyMock(
- return_value=novaseq_6000_post_1_5_kits_sample_sheet_with_selected_samples
+ with (
+ patch.object(
+ target=IlluminaRunDirectoryData,
+ attribute="sample_sheet",
+ new_callable=PropertyMock(
+ return_value=novaseq_6000_post_1_5_kits_sample_sheet_with_selected_samples
+ ),
+ ),
+ patch.object(
+ target=IlluminaRunDirectoryData,
+ attribute="get_demultiplexed_runs_dir",
+ return_value=tmp_demultiplexed_novaseq_6000_post_1_5_kits_path,
),
- ), patch.object(
- target=IlluminaRunDirectoryData,
- attribute="get_demultiplexed_runs_dir",
- return_value=tmp_demultiplexed_novaseq_6000_post_1_5_kits_path,
):
# WHEN adding the sample fastq files to Housekeeper
add_sample_fastq_files_to_housekeeper(
diff --git a/tests/services/orders/lims_service/test_order_lims_service.py b/tests/services/orders/lims_service/test_order_lims_service.py
new file mode 100644
index 0000000000..af30e418f4
--- /dev/null
+++ b/tests/services/orders/lims_service/test_order_lims_service.py
@@ -0,0 +1,223 @@
+import pytest
+
+from cg.constants import Workflow
+from cg.models.lims.sample import LimsSample
+from cg.services.orders.lims_service.service import OrderLimsService
+from cg.services.orders.validation.workflows.balsamic.models.order import BalsamicOrder
+from cg.services.orders.validation.workflows.fastq.models.order import FastqOrder
+from cg.services.orders.validation.workflows.fluffy.models.order import FluffyOrder
+from cg.services.orders.validation.workflows.microsalt.models.order import MicrosaltOrder
+from cg.services.orders.validation.workflows.mip_dna.models.order import MipDnaOrder
+from cg.services.orders.validation.workflows.mutant.models.order import MutantOrder
+from cg.services.orders.validation.workflows.rml.models.order import RmlOrder
+
+
+def test_to_lims_mip(mip_dna_order_to_submit):
+ # GIVEN a scout order for a trio
+ order_data = MipDnaOrder.model_validate(mip_dna_order_to_submit)
+ # WHEN parsing the order to format for LIMS import
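+ # enumerated_new_samples yields three-element tuples with the sample in the last position; only the sample objects are needed here.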
+ new_samples = [sample for _, _, sample in order_data.enumerated_new_samples]
+ samples: list[LimsSample] = OrderLimsService._build_lims_sample(
+ customer="cust003",
+ samples=new_samples,
+ workflow=Workflow.MIP_DNA,
+ delivery_type=order_data.delivery_type,
+ skip_reception_control=order_data.skip_reception_control,
+ )
+
+ # THEN it should list all samples
+ assert len(samples) == 4
+
+ # THEN container should be 96 well plate for all samples
+ assert {sample.container for sample in samples} == {"96 well plate"}
+
+ # THEN container names should be the same for all samples
+ container_names = {sample.container_name for sample in samples if sample.container_name}
+ assert container_names == {"MipPlate"}
+
+ # ... and pick out relevant UDFs
+ first_sample: LimsSample = samples[0]
+ assert first_sample.well_position == "A:1"
+ assert first_sample.udfs.priority == "standard"
+ assert first_sample.udfs.application == "WGSPCFC030"
+ assert first_sample.udfs.source == "blood"
+ assert first_sample.udfs.customer == "cust003"
+ assert first_sample.udfs.volume == "54"
+
+ # THEN assert that the comment of a sample is a string
+ assert isinstance(samples[1].udfs.comment, str)
+
+
+def test_to_lims_fastq(fastq_order_to_submit):
+ # GIVEN a fastq order for two samples; normal vs. tumour
+ order_data = FastqOrder.model_validate(fastq_order_to_submit)
+
+ # WHEN parsing the order to format for LIMS
+ samples: list[LimsSample] = OrderLimsService._build_lims_sample(
+ customer="dummyCust",
+ samples=order_data.samples,
+ workflow=Workflow.RAW_DATA,
+ delivery_type=order_data.delivery_type,
+ skip_reception_control=order_data.skip_reception_control,
+ )
+
+ # THEN should "work"
+ assert len(samples) == 2
+ normal_sample = samples[0]
+ tumour_sample = samples[1]
+ # ... and pick out relevant UDF values
+ assert normal_sample.udfs.tumour is False
+ assert tumour_sample.udfs.tumour is True
+ assert normal_sample.udfs.volume == "54"
+
+
+@pytest.mark.xfail(reason="RML sample container validation not working")
+def test_to_lims_rml(rml_order_to_submit: dict):
+ # GIVEN a rml order for four samples
+ order_data = RmlOrder.model_validate(rml_order_to_submit)
+
+ # WHEN parsing for LIMS
+ samples: list[LimsSample] = OrderLimsService._build_lims_sample(
+ customer="cust000",
+ samples=order_data.samples,
+ workflow=Workflow.RAW_DATA,
+ delivery_type=order_data.delivery_type,
+ )
+
+ # THEN it should have found the same number of samples
+ assert len(samples) == 4
+
+ # THEN the relevant UDFs are parsed
+ first_sample = samples[0]
+ assert first_sample.udfs.pool == "pool1"
+ assert first_sample.udfs.application.startswith("RML")
+ assert first_sample.udfs.index == "IDT DupSeq 10 bp Set B"
+ assert first_sample.udfs.index_number == "3"
+ assert first_sample.udfs.rml_plate_name == "plate1"
+ assert first_sample.udfs.well_position_rml == "A:1"
+
+
+@pytest.mark.xfail(reason="Fluffy sample container validation not working")
+def test_to_lims_fluffy(fluffy_order_to_submit: dict):
+ # GIVEN a Fluffy order for four samples
+ order_data = FluffyOrder.model_validate(fluffy_order_to_submit)
+
+ # WHEN parsing for LIMS
+ samples: list[LimsSample] = OrderLimsService._build_lims_sample(
+ customer="cust000",
+ samples=order_data.samples,
+ workflow=Workflow.FLUFFY,
+ delivery_type=order_data.delivery_type,
+ )
+
+ # THEN it should have found the same number of samples
+ assert len(samples) == 4
+
+ # THEN the relevant UDFs are parsed
+ first_sample = samples[0]
+ assert first_sample.udfs.pool == "pool1"
+ assert first_sample.udfs.application.startswith("RML")
+ assert first_sample.udfs.index == "IDT DupSeq 10 bp Set B"
+ assert first_sample.udfs.index_number == "3"
+ assert first_sample.udfs.rml_plate_name == "plate1"
+ assert first_sample.udfs.well_position_rml == "A:1"
+
+
+def test_to_lims_microbial(microbial_order_to_submit: dict):
+ # GIVEN a microbial order for three samples
+ order_data = MicrosaltOrder.model_validate(microbial_order_to_submit)
+
+ # WHEN parsing for LIMS
+ samples: list[LimsSample] = OrderLimsService._build_lims_sample(
+ customer="cust000",
+ samples=order_data.samples,
+ workflow=Workflow.MICROSALT,
+ delivery_type=order_data.delivery_type,
+ skip_reception_control=order_data.skip_reception_control,
+ )
+ # THEN it should "work"
+
+ assert len(samples) == 5
+ # ... and pick out relevant UDFs
+ first_sample = samples[0].dict()
+ assert first_sample["udfs"]["priority"] == "research"
+ assert first_sample["udfs"]["organism"] == "C. jejuni"
+ assert first_sample["udfs"]["reference_genome"] == "NC_000001"
+ assert first_sample["udfs"]["extraction_method"] == "MagNaPure 96"
+ assert first_sample["udfs"]["volume"] == "20"
+
+
+def test_to_lims_sarscov2(mutant_order: MutantOrder):
+ # GIVEN a sarscov2 order for samples
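+ # Unlike the fixtures above, mutant_order is already a validated MutantOrder, so no model_validate step is needed.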
+
+ # WHEN parsing for LIMS
+ samples: list[LimsSample] = OrderLimsService._build_lims_sample(
+ customer="cust000",
+ samples=mutant_order.samples,
+ workflow=Workflow.MUTANT,
+ delivery_type=mutant_order.delivery_type,
+ skip_reception_control=mutant_order.skip_reception_control,
+ )
+
+ # THEN it should have found the same number of samples
+ assert len(samples) == 6
+ # ... and pick out relevant UDFs
+ first_sample = samples[0].dict()
+ assert first_sample["udfs"]["collection_date"] == "2021-05-05"
+ assert first_sample["udfs"]["extraction_method"] == "MagNaPure 96"
+ assert first_sample["udfs"]["lab_code"] == "SE100 Karolinska"
+ assert first_sample["udfs"]["organism"] == "SARS-CoV-2"
+ assert first_sample["udfs"]["original_lab"] == "Karolinska University Hospital Solna"
+ assert first_sample["udfs"]["original_lab_address"] == "171 76 Stockholm"
+ assert first_sample["udfs"]["pre_processing_method"] == "COVIDSeq"
+ assert first_sample["udfs"]["priority"] == "research"
+ assert first_sample["udfs"]["reference_genome"] == "NC_111"
+ assert first_sample["udfs"]["region"] == "Stockholm"
+ assert first_sample["udfs"]["region_code"] == "01"
+ assert first_sample["udfs"]["selection_criteria"] == "Allmän övervakning"
+ assert first_sample["udfs"]["volume"] == "20"
+
+
+def test_to_lims_balsamic(balsamic_order_to_submit: dict):
+ # GIVEN a cancer order for a sample
+ order_data = BalsamicOrder.model_validate(balsamic_order_to_submit)
+
+ new_samples = [sample for _, _, sample in order_data.enumerated_new_samples]
+ # WHEN parsing the order to format for LIMS import
+ samples: list[LimsSample] = OrderLimsService._build_lims_sample(
+ customer="cust000",
+ samples=new_samples,
+ workflow=Workflow.BALSAMIC,
+ delivery_type=order_data.delivery_type,
+ skip_reception_control=order_data.skip_reception_control,
+ )
+ # THEN it should list all samples
+
+ assert len(samples) == 1
+ # ... and determine the container, container name, and well position
+
+ container_names = {sample.container_name for sample in samples if sample.container_name}
+
+ # ... and pick out relevant UDFs
+ first_sample = samples[0].dict()
+ assert first_sample["name"] == "BalsamicSample"
+ assert {sample.container for sample in samples} == {"96 well plate"}
+ assert first_sample["udfs"]["data_analysis"] == Workflow.BALSAMIC
+ assert first_sample["udfs"]["application"] == "PANKTTR100"
+ assert first_sample["udfs"]["sex"] == "M"
+ assert first_sample["udfs"]["customer"] == "cust000"
+ assert first_sample["udfs"]["source"] == "cytology (FFPE)"
+ assert first_sample["udfs"]["volume"] == "42"
+ assert first_sample["udfs"]["priority"] == "standard"
+
+ assert container_names == {"BalsamicPlate"}
+ assert first_sample["well_position"] == "A:1"
+ assert first_sample["udfs"]["tumour"] is True
+ assert first_sample["udfs"]["capture_kit"] == "GMCKsolid"
+ assert first_sample["udfs"]["tumour_purity"] == "13"
+
+ assert first_sample["udfs"]["formalin_fixation_time"] == "15"
+ assert first_sample["udfs"]["post_formalin_fixation_time"] == "3"
+ assert first_sample["udfs"]["tissue_block_size"] == "large"
+
+ assert first_sample["udfs"]["comment"] == "This is a sample comment"
diff --git a/tests/services/orders/order_lims_service/test_order_lims_service.py b/tests/services/orders/order_lims_service/test_order_lims_service.py
deleted file mode 100644
index c22499242b..0000000000
--- a/tests/services/orders/order_lims_service/test_order_lims_service.py
+++ /dev/null
@@ -1,179 +0,0 @@
-import pytest
-
-from cg.constants import Workflow
-from cg.services.orders.order_lims_service.order_lims_service import OrderLimsService
-from cg.models.lims.sample import LimsSample
-from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
-
-
-def test_to_lims_mip(mip_order_to_submit):
- # GIVEN a scout order for a trio
- order_data = OrderIn.parse_obj(obj=mip_order_to_submit, project=OrderType.MIP_DNA)
- # WHEN parsing the order to format for LIMS import
- samples: list[LimsSample] = OrderLimsService._build_lims_sample(
- customer="cust003", samples=order_data.samples
- )
-
- # THEN it should list all samples
- assert len(samples) == 4
-
- # THEN container should be 96 well plate for all samples
- assert {sample.container for sample in samples} == {"96 well plate"}
-
- # THEN container names should be the same for all samples
- container_names = {sample.container_name for sample in samples if sample.container_name}
- assert container_names == {"CMMS"}
-
- # ... and pick out relevant UDFs
- first_sample: LimsSample = samples[0]
- assert first_sample.well_position == "A:1"
- assert first_sample.udfs.family_name == "family1"
- assert first_sample.udfs.priority == "standard"
- assert first_sample.udfs.application == "WGSPCFC030"
- assert first_sample.udfs.source == "tissue (fresh frozen)"
- assert first_sample.udfs.quantity == "220"
- assert first_sample.udfs.customer == "cust003"
- assert first_sample.udfs.volume == "1.0"
-
- # THEN assert that the comment of a sample is a string
- assert isinstance(samples[1].udfs.comment, str)
-
-
-def test_to_lims_fastq(fastq_order_to_submit):
- # GIVEN a fastq order for two samples; normal vs. tumour
- order_data = OrderIn.parse_obj(obj=fastq_order_to_submit, project=OrderType.FASTQ)
-
- # WHEN parsing the order to format for LIMS
- samples: list[LimsSample] = OrderLimsService._build_lims_sample(
- customer="dummyCust", samples=order_data.samples
- )
-
- # THEN should "work"
- assert len(samples) == 2
- normal_sample = samples[0]
- tumor_sample = samples[1]
- # ... and pick out relevant UDF values
- assert normal_sample.udfs.tumour is False
- assert tumor_sample.udfs.tumour is True
- assert normal_sample.udfs.volume == "1"
-
-
-def test_to_lims_rml(rml_order_to_submit):
- # GIVEN a rml order for four samples
- order_data = OrderIn.parse_obj(obj=rml_order_to_submit, project=OrderType.RML)
-
- # WHEN parsing for LIMS
- samples: list[LimsSample] = OrderLimsService._build_lims_sample(
- customer="dummyCust", samples=order_data.samples
- )
-
- # THEN it should have found the same number of samples
- assert len(samples) == 4
- # ... and pick out relevant UDFs
- first_sample = samples[0]
- assert first_sample.udfs.pool == "pool-1"
- assert first_sample.udfs.volume == "30"
- assert first_sample.udfs.concentration == "5.0"
- assert first_sample.udfs.index == "IDT DupSeq 10 bp Set B"
- assert first_sample.udfs.index_number == "1"
-
-
-def test_to_lims_microbial(microbial_order_to_submit):
- # GIVEN a microbial order for three samples
- order_data = OrderIn.parse_obj(obj=microbial_order_to_submit, project=OrderType.MICROSALT)
-
- # WHEN parsing for LIMS
- samples: list[LimsSample] = OrderLimsService._build_lims_sample(
- customer="cust000", samples=order_data.samples
- )
- # THEN it should "work"
-
- assert len(samples) == 5
- # ... and pick out relevant UDFs
- first_sample = samples[0].dict()
- assert first_sample["udfs"]["priority"] == "research"
- assert first_sample["udfs"]["organism"] == "M.upium"
- assert first_sample["udfs"]["reference_genome"] == "NC_111"
- assert (
- first_sample["udfs"]["extraction_method"] == "MagNaPure 96 (contact Clinical Genomics "
- "before submission)"
- )
- assert first_sample["udfs"]["volume"] == "1"
-
-
-def test_to_lims_sarscov2(sarscov2_order_to_submit):
- # GIVEN a sarscov2 order for samples
- order_data = OrderIn.parse_obj(obj=sarscov2_order_to_submit, project=OrderType.SARS_COV_2)
-
- # WHEN parsing for LIMS
- samples: list[LimsSample] = OrderLimsService._build_lims_sample(
- customer="cust000", samples=order_data.samples
- )
-
- # THEN it should have found the same number of samples
- assert len(samples) == 6
- # ... and pick out relevant UDFs
- first_sample = samples[0].dict()
- assert first_sample["udfs"]["collection_date"] == "2021-05-05"
- assert first_sample["udfs"]["extraction_method"] == "MagNaPure 96"
- assert first_sample["udfs"]["lab_code"] == "SE100 Karolinska"
- assert first_sample["udfs"]["organism"] == "SARS CoV-2"
- assert first_sample["udfs"]["original_lab"] == "Karolinska University Hospital Solna"
- assert first_sample["udfs"]["original_lab_address"] == "171 76 Stockholm"
- assert first_sample["udfs"]["pre_processing_method"] == "COVIDSeq"
- assert first_sample["udfs"]["priority"] == "research"
- assert first_sample["udfs"]["reference_genome"] == "NC_111"
- assert first_sample["udfs"]["region"] == "Stockholm"
- assert first_sample["udfs"]["region_code"] == "01"
- assert first_sample["udfs"]["selection_criteria"] == "1. Allmän övervakning"
- assert first_sample["udfs"]["volume"] == "1"
-
-
-@pytest.mark.parametrize(
- "project", [OrderType.BALSAMIC, OrderType.BALSAMIC_UMI, OrderType.BALSAMIC_QC]
-)
-def test_to_lims_balsamic(balsamic_order_to_submit, project):
- # GIVEN a cancer order for a sample
- order_data = OrderIn.parse_obj(obj=balsamic_order_to_submit, project=project)
-
- # WHEN parsing the order to format for LIMS import
- samples: list[LimsSample] = OrderLimsService._build_lims_sample(
- customer="cust000", samples=order_data.samples
- )
- # THEN it should list all samples
-
- assert len(samples) == 1
- # ... and determine the container, container name, and well position
-
- container_names = {sample.container_name for sample in samples if sample.container_name}
-
- # ... and pick out relevant UDFs
- first_sample = samples[0].dict()
- assert first_sample["name"] == "s1"
- assert {sample.container for sample in samples} == set(["96 well plate"])
- assert first_sample["udfs"]["data_analysis"] in [
- Workflow.BALSAMIC,
- Workflow.BALSAMIC_QC,
- Workflow.BALSAMIC_UMI,
- ]
- assert first_sample["udfs"]["application"] == "WGSPCFC030"
- assert first_sample["udfs"]["sex"] == "M"
- assert first_sample["udfs"]["family_name"] == "family1"
- assert first_sample["udfs"]["customer"] == "cust000"
- assert first_sample["udfs"]["source"] == "blood"
- assert first_sample["udfs"]["volume"] == "1.0"
- assert first_sample["udfs"]["priority"] == "standard"
-
- assert container_names == set(["p1"])
- assert first_sample["well_position"] == "A:1"
- assert first_sample["udfs"]["tumour"] is True
- assert first_sample["udfs"]["capture_kit"] == "other"
- assert first_sample["udfs"]["tumour_purity"] == "75"
-
- assert first_sample["udfs"]["formalin_fixation_time"] == "1"
- assert first_sample["udfs"]["post_formalin_fixation_time"] == "2"
- assert first_sample["udfs"]["tissue_block_size"] == "small"
-
- assert first_sample["udfs"]["quantity"] == "2"
- assert first_sample["udfs"]["comment"] == "other Elution buffer"
diff --git a/tests/services/orders/order_store_service/test_fastq_order_service.py b/tests/services/orders/order_store_service/test_fastq_order_service.py
deleted file mode 100644
index 7bbc9251bb..0000000000
--- a/tests/services/orders/order_store_service/test_fastq_order_service.py
+++ /dev/null
@@ -1,197 +0,0 @@
-import datetime as dt
-
-import pytest
-
-from cg.constants import DataDelivery, Workflow
-from cg.constants.sequencing import SeqLibraryPrepCategory
-from cg.exc import OrderError
-from cg.models.orders.order import OrderIn, OrderType
-from cg.services.orders.store_order_services.store_fastq_order_service import StoreFastqOrderService
-from cg.store.models import Application, Case, Sample
-from cg.store.store import Store
-
-
-def test_samples_to_status(
- fastq_order_to_submit: dict, store_fastq_order_service: StoreFastqOrderService
-):
- # GIVEN fastq order with two samples
- order = OrderIn.parse_obj(fastq_order_to_submit, OrderType.FASTQ)
-
- # WHEN parsing for status
- data: dict = store_fastq_order_service.order_to_status(order=order)
-
- # THEN it should pick out samples and relevant information
- assert len(data["samples"]) == 2
- first_sample: dict = data["samples"][0]
- assert first_sample["name"] == "prov1"
- assert first_sample["application"] == "WGSPCFC060"
- assert first_sample["priority"] == "priority"
- assert first_sample["tumour"] is False
- assert first_sample["volume"] == "1"
-
- # THEN the other sample is a tumour
- assert data["samples"][1]["tumour"] is True
-
-
-def test_store_samples(
- base_store: Store,
- fastq_status_data: dict,
- ticket_id: str,
- store_fastq_order_service: StoreFastqOrderService,
-):
- # GIVEN a basic store with no samples and a fastq order
- assert not base_store._get_query(table=Sample).first()
- assert base_store._get_query(table=Case).count() == 0
-
- # WHEN storing the order
- new_samples = store_fastq_order_service.store_items_in_status(
- customer_id=fastq_status_data["customer"],
- order=fastq_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=fastq_status_data["samples"],
- )
-
- # THEN it should store the samples and create a case for each sample
- assert len(new_samples) == 2
- assert len(base_store._get_query(table=Sample).all()) == 2
- assert base_store._get_query(table=Case).count() == 2
- first_sample = new_samples[0]
- assert len(first_sample.links) == 2
- family_link = first_sample.links[0]
- assert family_link.case in base_store.get_cases()
- assert family_link.case.data_analysis
- assert family_link.case.data_delivery in [DataDelivery.FASTQ, DataDelivery.NO_DELIVERY]
-
-
-def test_store_samples_sex_stored(
- base_store: Store,
- fastq_status_data: dict,
- ticket_id: str,
- store_fastq_order_service: StoreFastqOrderService,
-):
- # GIVEN a basic store with no samples and a fastq order
- assert not base_store._get_query(table=Sample).first()
- assert base_store._get_query(table=Case).count() == 0
-
- # WHEN storing the order
- new_samples = store_fastq_order_service.store_items_in_status(
- customer_id=fastq_status_data["customer"],
- order=fastq_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=fastq_status_data["samples"],
- )
-
- # THEN the sample sex should be stored
- assert new_samples[0].sex == "male"
-
-
-def test_store_fastq_samples_non_tumour_wgs_to_mip(
- base_store: Store, fastq_status_data: dict, store_fastq_order_service: StoreFastqOrderService
-):
- # GIVEN a basic store with no samples and a non-tumour fastq order as wgs
- assert not base_store._get_query(table=Sample).first()
- assert base_store._get_query(table=Case).count() == 0
- base_store.get_application_by_tag(
- fastq_status_data["samples"][0]["application"]
- ).prep_category = SeqLibraryPrepCategory.WHOLE_GENOME_SEQUENCING
- fastq_status_data["samples"][0]["tumour"] = False
-
- # WHEN storing the order
- new_samples = store_fastq_order_service.store_items_in_status(
- customer_id=fastq_status_data["customer"],
- order=fastq_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=1234348,
- items=fastq_status_data["samples"],
- )
-
- # THEN the analysis for the case should be MAF
- assert new_samples[0].links[0].case.data_analysis == Workflow.MIP_DNA
-
-
-def test_store_fastq_samples_tumour_wgs_to_fastq(
- base_store: Store,
- fastq_status_data: dict,
- ticket_id: str,
- store_fastq_order_service: StoreFastqOrderService,
-):
- # GIVEN a basic store with no samples and a tumour fastq order as wgs
- assert not base_store._get_query(table=Sample).first()
- assert base_store._get_query(table=Case).count() == 0
- base_store.get_application_by_tag(
- fastq_status_data["samples"][0]["application"]
- ).prep_category = SeqLibraryPrepCategory.WHOLE_GENOME_SEQUENCING
- fastq_status_data["samples"][0]["tumour"] = True
-
- # WHEN storing the order
- new_samples = store_fastq_order_service.store_items_in_status(
- customer_id=fastq_status_data["customer"],
- order=fastq_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=fastq_status_data["samples"],
- )
-
- # THEN the analysis for the case should be RAW_DATA
- assert new_samples[0].links[0].case.data_analysis == Workflow.RAW_DATA
-
-
-def test_store_fastq_samples_non_wgs_as_fastq(
- base_store: Store,
- fastq_status_data: dict,
- ticket_id: str,
- store_fastq_order_service: StoreFastqOrderService,
-):
- # GIVEN a basic store with no samples and a fastq order as non wgs
- assert not base_store._get_query(table=Sample).first()
- assert base_store._get_query(table=Case).count() == 0
- non_wgs_prep_category = SeqLibraryPrepCategory.WHOLE_EXOME_SEQUENCING
-
- non_wgs_applications = base_store._get_query(table=Application).filter(
- Application.prep_category == non_wgs_prep_category
- )
-
- assert non_wgs_applications
-
- for sample in fastq_status_data["samples"]:
- sample["application"] = non_wgs_applications[0].tag
-
- # WHEN storing the order
- new_samples = store_fastq_order_service.store_items_in_status(
- customer_id=fastq_status_data["customer"],
- order=fastq_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=fastq_status_data["samples"],
- )
-
- # THEN the analysis for the case should be RAW_DATA (none)
- assert new_samples[0].links[0].case.data_analysis == Workflow.RAW_DATA
-
-
-def test_store_samples_bad_apptag(
- base_store: Store,
- fastq_status_data: dict,
- ticket_id: str,
- store_fastq_order_service: StoreFastqOrderService,
-):
- # GIVEN a basic store with no samples and a fastq order
- assert not base_store._get_query(table=Sample).first()
- assert base_store._get_query(table=Case).count() == 0
-
- # GIVEN a non-existing application tag
- for sample in fastq_status_data["samples"]:
- sample["application"] = "nonexistingtag"
-
- # THEN it should raise OrderError
- with pytest.raises(OrderError):
- # WHEN storing the order
- store_fastq_order_service.store_items_in_status(
- customer_id=fastq_status_data["customer"],
- order=fastq_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=fastq_status_data["samples"],
- )
diff --git a/tests/services/orders/order_store_service/test_generic_order_store_service.py b/tests/services/orders/order_store_service/test_generic_order_store_service.py
deleted file mode 100644
index 56459bb7c8..0000000000
--- a/tests/services/orders/order_store_service/test_generic_order_store_service.py
+++ /dev/null
@@ -1,196 +0,0 @@
-"""Module to test the StoreGenericOrderService class."""
-
-import datetime as dt
-import math
-from copy import deepcopy
-
-from cg.constants import DataDelivery, Priority, Workflow
-from cg.models.orders.order import OrderIn, OrderType
-from cg.services.orders.store_order_services.store_case_order import (
- StoreCaseOrderService,
-)
-from cg.store.models import Sample
-from cg.store.store import Store
-
-
-def test_cases_to_status(
- mip_order_to_submit: dict, store_generic_order_service: StoreCaseOrderService
-):
- # GIVEN a scout order with a trio case
- project: OrderType = OrderType.MIP_DNA
- order = OrderIn.parse_obj(mip_order_to_submit, project=project)
-
- # WHEN parsing for status
- data = store_generic_order_service.order_to_status(order=order)
-
- # THEN it should pick out the case
- assert len(data["families"]) == 2
- family = data["families"][0]
- assert family["name"] == "family1"
- assert family["data_analysis"] == Workflow.MIP_DNA
- assert family["data_delivery"] == str(DataDelivery.SCOUT)
- assert family["priority"] == Priority.standard.name
- assert family["cohorts"] == ["Other"]
- assert (
- family["synopsis"]
- == "As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for."
- )
- assert set(family["panels"]) == {"IEM"}
- assert len(family["samples"]) == 3
-
- first_sample = family["samples"][0]
- assert math.isclose(first_sample["age_at_sampling"], 17.18192, rel_tol=1e-09, abs_tol=1e-09)
- assert first_sample["name"] == "sample1"
- assert first_sample["application"] == "WGSPCFC030"
- assert first_sample["phenotype_groups"] == ["Phenotype-group"]
- assert first_sample["phenotype_terms"] == ["HP:0012747", "HP:0025049"]
- assert first_sample["sex"] == "female"
- assert first_sample["status"] == "affected"
- assert first_sample["subject_id"] == "subject1"
- assert first_sample["mother"] == "sample2"
- assert first_sample["father"] == "sample3"
-
- # ... second sample has a comment
- assert isinstance(family["samples"][1]["comment"], str)
-
-
-def test_cases_to_status_synopsis(
- mip_order_to_submit: dict, store_generic_order_service: StoreCaseOrderService
-):
- # GIVEN a scout order with a trio case where synopsis is None
- modified_order: dict = deepcopy(mip_order_to_submit)
- for sample in modified_order["samples"]:
- sample["synopsis"] = None
-
- project: OrderType = OrderType.MIP_DNA
- order = OrderIn.parse_obj(mip_order_to_submit, project=project)
-
- # WHEN parsing for status
- store_generic_order_service.order_to_status(order=order)
-
- # THEN No exception should have been raised on synopsis
-
-
-def test_store_mip(
- base_store: Store,
- mip_status_data: dict,
- ticket_id: str,
- store_generic_order_service: StoreCaseOrderService,
-):
- # GIVEN a basic store with no samples or nothing in it + scout order
- assert not base_store._get_query(table=Sample).first()
- assert not base_store.get_cases()
-
- # WHEN storing the order
- new_families = store_generic_order_service.store_items_in_status(
- customer_id=mip_status_data["customer"],
- order=mip_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=mip_status_data["families"],
- )
-
- # THEN it should create and link samples and the case
- assert len(new_families) == 2
- new_case = new_families[0]
- assert new_case.name == "family1"
- assert set(new_case.panels) == {"IEM"}
- assert new_case.priority_human == Priority.standard.name
-
- assert len(new_case.links) == 3
- new_link = new_case.links[0]
- assert new_case.data_analysis == Workflow.MIP_DNA
- assert new_case.data_delivery == str(DataDelivery.SCOUT)
- assert set(new_case.cohorts) == {"Other"}
- assert (
- new_case.synopsis
- == "As for the synopsis it will be this overly complex sentence to prove that the synopsis field might in fact be a very long string, which we should be prepared for."
- )
- assert new_link.status == "affected"
- assert new_link.mother.name == "sample2"
- assert new_link.father.name == "sample3"
- assert new_link.sample.name == "sample1"
- assert new_link.sample.sex == "female"
- assert new_link.sample.application_version.application.tag == "WGSPCFC030"
- assert new_link.sample.is_tumour
- assert isinstance(new_case.links[1].sample.comment, str)
-
- assert set(new_link.sample.phenotype_groups) == {"Phenotype-group"}
- assert set(new_link.sample.phenotype_terms) == {"HP:0012747", "HP:0025049"}
- assert new_link.sample.subject_id == "subject1"
- assert math.isclose(new_link.sample.age_at_sampling, 17.18192, rel_tol=1e-09, abs_tol=1e-09)
-
-
-def test_store_mip_rna(
- base_store: Store,
- mip_rna_status_data,
- ticket_id: str,
- store_generic_order_service: StoreCaseOrderService,
-):
- # GIVEN a basic store with no samples or nothing in it + rna order
- rna_application_tag = "RNAPOAR025"
- assert not base_store._get_query(table=Sample).first()
- assert not base_store.get_cases()
- assert base_store.get_application_by_tag(tag=rna_application_tag)
-
- # WHEN storing the order
- new_cases = store_generic_order_service.store_items_in_status(
- customer_id=mip_rna_status_data["customer"],
- order=mip_rna_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=mip_rna_status_data["families"],
- )
-
- # THEN it should create and link samples and the casing
- assert len(new_cases) == 1
- new_casing = new_cases[0]
-
- assert len(new_casing.links) == 2
- new_link = new_casing.links[0]
- assert new_casing.data_analysis == Workflow.MIP_RNA
- assert new_casing.data_delivery == str(DataDelivery.SCOUT)
- assert new_link.sample.name == "sample1-rna-t1"
- assert new_link.sample.application_version.application.tag == rna_application_tag
-
-
-def test_store_cancer_samples(
- base_store: Store,
- balsamic_status_data: dict,
- ticket_id: str,
- store_generic_order_service: StoreCaseOrderService,
-):
-
- # GIVEN a basic store with no samples and a cancer order
- assert not base_store._get_query(table=Sample).first()
- assert not base_store.get_cases()
-
- # WHEN storing the order
- new_families = store_generic_order_service.store_items_in_status(
- customer_id=balsamic_status_data["customer"],
- order=balsamic_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=balsamic_status_data["families"],
- )
-
- # THEN it should create and link samples and the case
- assert len(new_families) == 1
- new_case = new_families[0]
- assert new_case.name == "family1"
- assert new_case.data_analysis in [
- Workflow.BALSAMIC,
- Workflow.BALSAMIC_QC,
- Workflow.BALSAMIC_UMI,
- ]
- assert new_case.data_delivery == str(DataDelivery.FASTQ_ANALYSIS_SCOUT)
- assert not set(new_case.panels)
- assert new_case.priority_human == Priority.standard.name
-
- assert len(new_case.links) == 1
- new_link = new_case.links[0]
- assert new_link.sample.name == "s1"
- assert new_link.sample.sex == "male"
- assert new_link.sample.application_version.application.tag == "WGSPCFC030"
- assert new_link.sample.comment == "other Elution buffer"
- assert new_link.sample.is_tumour
diff --git a/tests/services/orders/order_store_service/test_metagenome_store_service.py b/tests/services/orders/order_store_service/test_metagenome_store_service.py
deleted file mode 100644
index 7d8b91573f..0000000000
--- a/tests/services/orders/order_store_service/test_metagenome_store_service.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import datetime as dt
-import pytest
-from cg.exc import OrderError
-from cg.models.orders.order import OrderIn, OrderType
-from cg.services.orders.store_order_services.store_metagenome_order import (
- StoreMetagenomeOrderService,
-)
-from cg.store.models import Sample
-from cg.store.store import Store
-
-
-def test_metagenome_to_status(
- metagenome_order_to_submit: dict, store_metagenome_order_service: StoreMetagenomeOrderService
-):
- # GIVEN metagenome order with two samples
- order = OrderIn.parse_obj(metagenome_order_to_submit, OrderType.METAGENOME)
-
- # WHEN parsing for status
- data = store_metagenome_order_service.order_to_status(order=order)
- case = data["families"][0]
- # THEN it should pick out samples and relevant information
- assert len(case["samples"]) == 2
- first_sample = case["samples"][0]
- assert first_sample["name"] == "Bristol"
- assert first_sample["application"] == "METLIFR020"
- assert first_sample["priority"] == "standard"
- assert first_sample["volume"] == "1.0"
-
-
-def test_store_metagenome_samples_bad_apptag(
- base_store: Store,
- metagenome_status_data: dict,
- ticket_id: str,
- store_metagenome_order_service: StoreMetagenomeOrderService,
-):
- # GIVEN a basic store with no samples and a metagenome order
- assert not base_store._get_query(table=Sample).first()
-
- for sample in metagenome_status_data["families"][0]["samples"]:
- sample["application"] = "nonexistingtag"
-
- # THEN it should raise OrderError
- with pytest.raises(OrderError):
- # WHEN storing the order
- store_metagenome_order_service.store_items_in_status(
- customer_id=metagenome_status_data["customer"],
- order=metagenome_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=metagenome_status_data["families"],
- )
diff --git a/tests/services/orders/order_store_service/test_microbial_fastq_order_store_service.py b/tests/services/orders/order_store_service/test_microbial_fastq_order_store_service.py
deleted file mode 100644
index f0764ebac3..0000000000
--- a/tests/services/orders/order_store_service/test_microbial_fastq_order_store_service.py
+++ /dev/null
@@ -1,68 +0,0 @@
-from datetime import datetime
-
-from cg.constants import DataDelivery, Workflow
-from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
-from cg.services.orders.store_order_services.store_microbial_fastq_order_service import (
- StoreMicrobialFastqOrderService,
-)
-from cg.store.models import Case, Sample
-
-
-def test_microbial_samples_to_status(
- microbial_fastq_order_to_submit: dict,
- store_microbial_fastq_order_service: StoreMicrobialFastqOrderService,
-):
- # GIVEN microbial order with three samples
- order = OrderIn.parse_obj(microbial_fastq_order_to_submit, OrderType.MICROBIAL_FASTQ)
-
- # WHEN parsing for status
- data = store_microbial_fastq_order_service.order_to_status(order=order)
-
- # THEN it should pick out samples and relevant information
- assert len(data["samples"]) == 2
- assert data["customer"] == "cust002"
- assert data["order"] == "Microbial Fastq order"
- assert data["comment"] == ""
-
- # THEN first sample should contain all the relevant data from the microbial order
- sample_data = data["samples"][0]
- assert sample_data["priority"] == "priority"
- assert sample_data["name"] == "prov1"
- assert sample_data.get("internal_id") is None
- assert sample_data["application"] == "WGSPCFC060"
- assert sample_data["comment"] == "sample comment"
- assert sample_data["volume"] == "1"
- assert sample_data["data_analysis"] == Workflow.MICROSALT
- assert sample_data["data_delivery"] == str(DataDelivery.FASTQ)
-
-
-def test_store_samples(
- microbial_fastq_status_data: dict,
- ticket_id: str,
- store_microbial_fastq_order_service: StoreMicrobialFastqOrderService,
-):
- # GIVEN a basic store with no samples and a fastq order
-
- assert not store_microbial_fastq_order_service.status_db._get_query(table=Sample).first()
- assert store_microbial_fastq_order_service.status_db._get_query(table=Case).count() == 0
-
- # WHEN storing the order
- new_samples = store_microbial_fastq_order_service.store_items_in_status(
- customer_id=microbial_fastq_status_data["customer"],
- order=microbial_fastq_status_data["order"],
- ordered=datetime.now(),
- ticket_id=ticket_id,
- items=microbial_fastq_status_data["samples"],
- )
-
- # THEN it should store the samples and create a case for each sample
- assert len(new_samples) == 2
- assert len(store_microbial_fastq_order_service.status_db._get_query(table=Sample).all()) == 2
- assert store_microbial_fastq_order_service.status_db._get_query(table=Case).count() == 2
- first_sample = new_samples[0]
- assert len(first_sample.links) == 1
- case_link = first_sample.links[0]
- assert case_link.case in store_microbial_fastq_order_service.status_db.get_cases()
- assert case_link.case.data_analysis
- assert case_link.case.data_delivery == DataDelivery.FASTQ
diff --git a/tests/services/orders/order_store_service/test_microbial_store_order_service.py b/tests/services/orders/order_store_service/test_microbial_store_order_service.py
deleted file mode 100644
index c638fd43dd..0000000000
--- a/tests/services/orders/order_store_service/test_microbial_store_order_service.py
+++ /dev/null
@@ -1,247 +0,0 @@
-from cg.services.orders.store_order_services.store_microbial_order import StoreMicrobialOrderService
-from cg.store.models import Case
-import datetime as dt
-from cg.constants import DataDelivery
-from cg.constants.constants import Workflow
-from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
-from cg.models.orders.sample_base import ControlEnum
-from cg.models.orders.samples import SarsCov2Sample
-from cg.store.models import Customer, Sample
-from cg.store.store import Store
-
-
-def test_microbial_samples_to_status(
- microbial_order_to_submit: dict, store_microbial_order_service: StoreMicrobialOrderService
-):
- # GIVEN microbial order with three samples
- order = OrderIn.parse_obj(microbial_order_to_submit, OrderType.MICROSALT)
-
- # WHEN parsing for status
- data = store_microbial_order_service.order_to_status(order=order)
-
- # THEN it should pick out samples and relevant information
- assert len(data["samples"]) == 5
- assert data["customer"] == "cust002"
- assert data["order"] == "Microbial samples"
- assert data["comment"] == "Order comment"
- assert data["data_analysis"] == Workflow.MICROSALT
- assert data["data_delivery"] == str(DataDelivery.FASTQ)
-
- # THEN first sample should contain all the relevant data from the microbial order
- sample_data = data["samples"][0]
- assert sample_data["priority"] == "research"
- assert sample_data["name"] == "all-fields"
- assert sample_data.get("internal_id") is None
- assert sample_data["organism_id"] == "M.upium"
- assert sample_data["reference_genome"] == "NC_111"
- assert sample_data["application"] == "MWRNXTR003"
- assert sample_data["comment"] == "plate comment"
- assert sample_data["volume"] == "1"
-
-
-def test_sarscov2_samples_to_status(
- sarscov2_order_to_submit: dict, store_microbial_order_service: StoreMicrobialOrderService
-):
- # GIVEN sarscov2 order with three samples
- order = OrderIn.parse_obj(sarscov2_order_to_submit, OrderType.SARS_COV_2)
-
- # WHEN parsing for status
- data = store_microbial_order_service.order_to_status(order=order)
-
- # THEN it should pick out samples and relevant information
- assert len(data["samples"]) == 6
- assert data["customer"] == "cust002"
- assert data["order"] == "Sars-CoV-2 samples"
- assert data["comment"] == "Order comment"
- assert data["data_analysis"] == Workflow.MUTANT
- assert data["data_delivery"] == str(DataDelivery.FASTQ)
-
- # THEN first sample should contain all the relevant data from the microbial order
- sample_data = data["samples"][0]
- assert sample_data.get("internal_id") is None
- assert sample_data["priority"] == "research"
- assert sample_data["application"] == "VWGDPTR001"
- assert sample_data["comment"] == "plate comment"
- assert sample_data["name"] == "all-fields"
- assert sample_data["organism_id"] == "SARS CoV-2"
- assert sample_data["reference_genome"] == "NC_111"
- assert sample_data["volume"] == "1"
-
-
-def test_store_microbial_samples(
- base_store: Store,
- microbial_status_data: dict,
- ticket_id: str,
- store_microbial_order_service: StoreMicrobialOrderService,
-):
- # GIVEN a basic store with no samples and a microbial order and one Organism
- assert not base_store._get_query(table=Sample).first()
- assert base_store._get_query(table=Case).count() == 0
- assert base_store.get_all_organisms().count() == 1
-
- # WHEN storing the order
- new_samples = store_microbial_order_service.store_items_in_status(
- customer_id=microbial_status_data["customer"],
- order=microbial_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=microbial_status_data["samples"],
- comment="",
- data_analysis=Workflow.MICROSALT,
- data_delivery=DataDelivery.FASTQ_QC,
- )
-
- # THEN it should store the samples under a case (case) and the used previously unknown
- # organisms
- assert new_samples
- assert base_store._get_query(table=Case).count() == 1
- assert len(new_samples) == 5
- assert len(base_store._get_query(table=Sample).all()) == 5
- assert base_store.get_all_organisms().count() == 3
-
-
-def test_store_microbial_case_data_analysis_stored(
- base_store: Store,
- microbial_status_data: dict,
- ticket_id: str,
- store_microbial_order_service: StoreMicrobialOrderService,
-):
- # GIVEN a basic store with no samples and a microbial order and one Organism
- assert not base_store._get_query(table=Sample).first()
- assert base_store._get_query(table=Case).count() == 0
-
- # WHEN storing the order
- store_microbial_order_service.store_items_in_status(
- customer_id=microbial_status_data["customer"],
- order=microbial_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=microbial_status_data["samples"],
- comment="",
- data_analysis=Workflow.MICROSALT,
- data_delivery=DataDelivery.FASTQ_QC,
- )
-
- # THEN store the samples under a case with the microbial data_analysis type on case level
- assert len(base_store._get_query(table=Sample).all()) > 0
- assert base_store._get_query(table=Case).count() == 1
-
- microbial_case = base_store.get_cases()[0]
- assert microbial_case.data_analysis == Workflow.MICROSALT
- assert microbial_case.data_delivery == str(DataDelivery.FASTQ_QC)
-
-
-def test_store_microbial_sample_priority(
- base_store: Store,
- microbial_status_data: dict,
- ticket_id: str,
- store_microbial_order_service: StoreMicrobialOrderService,
-):
- # GIVEN a basic store with no samples
- assert not base_store._get_query(table=Sample).first()
-
- # WHEN storing the order
- store_microbial_order_service.store_items_in_status(
- customer_id=microbial_status_data["customer"],
- order=microbial_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=microbial_status_data["samples"],
- comment="",
- data_analysis=Workflow.MICROSALT,
- data_delivery=DataDelivery.FASTQ_QC,
- )
-
- # THEN it should store the sample priority
- assert len(base_store._get_query(table=Sample).all()) > 0
- microbial_sample = base_store._get_query(table=Sample).first()
-
- assert microbial_sample.priority_human == "research"
-
-
-def test_order_to_status_control_exists(
- sarscov2_order_to_submit: dict,
- base_store: Store,
- store_microbial_order_service: StoreMicrobialOrderService,
-):
- # GIVEN sarscov2 order with three samples
- order: OrderIn = OrderIn.parse_obj(sarscov2_order_to_submit, OrderType.SARS_COV_2)
-
- # WHEN transforming order to status structure
- result: dict = store_microbial_order_service.order_to_status(order=order)
-
- # THEN check that control is in the result
- sample: dict
- for sample in result.get("samples"):
- assert "control" in sample
-
-
-def test_order_to_status_control_has_input_value(
- sarscov2_order_to_submit: dict,
- base_store: Store,
- store_microbial_order_service: StoreMicrobialOrderService,
-):
- # GIVEN sarscov2 order with three samples with control value set
- control_value = ControlEnum.positive
- order: OrderIn = OrderIn.parse_obj(sarscov2_order_to_submit, OrderType.SARS_COV_2)
- sample: SarsCov2Sample
- for sample in order.samples:
- sample.control: ControlEnum = control_value
-
- # WHEN transforming order to status structure
- result: dict = store_microbial_order_service.order_to_status(order=order)
-
- # THEN check that control is in the result
- sample: dict
- for sample in result.get("samples"):
- assert control_value in sample.get("control")
-
-
-def test_mutant_sample_generates_fields(sarscov2_order_to_submit: dict, base_store: Store):
- """Tests that Mutant orders with region and original_lab set can generate region_code and original_lab_address."""
- # GIVEN sarscov2 order with six samples, one without region_code and original_lab_address
-
- # WHEN parsing the order
- order: OrderIn = OrderIn.parse_obj(sarscov2_order_to_submit, OrderType.SARS_COV_2)
-
- # THEN all samples should have region_code and original_lab_address set
- for sample in order.samples:
- assert sample.region_code
- assert sample.original_lab_address
-
-
-def test_store_items_in_status_control_has_stored_value(
- sarscov2_order_to_submit: dict,
- base_store: Store,
- store_microbial_order_service: StoreMicrobialOrderService,
-):
- # GIVEN sarscov2 order with three samples with control value
- order: OrderIn = OrderIn.parse_obj(sarscov2_order_to_submit, OrderType.SARS_COV_2)
- control_value = ControlEnum.positive
- sample: SarsCov2Sample
- for sample in order.samples:
- sample.control: ControlEnum = control_value
-
- status_data = store_microbial_order_service.order_to_status(order=order)
-
- # WHEN storing the order
- store_microbial_order_service.store_items_in_status(
- comment="",
- customer_id=order.customer,
- data_analysis=Workflow.MUTANT,
- data_delivery=DataDelivery.FASTQ,
- order="",
- ordered=dt.datetime.now(),
- ticket_id=123456,
- items=status_data.get("samples"),
- )
-
- # THEN control should exist on the sample in the store
- customer: Customer = base_store.get_customer_by_internal_id(customer_internal_id=order.customer)
- sample: SarsCov2Sample
- for sample in order.samples:
- stored_sample: Sample = base_store.get_sample_by_customer_and_name(
- customer_entry_id=[customer.id], sample_name=sample.name
- )
- assert stored_sample.control == control_value
diff --git a/tests/services/orders/order_store_service/test_pacbio_order_service.py b/tests/services/orders/order_store_service/test_pacbio_order_service.py
deleted file mode 100644
index a7336f54c9..0000000000
--- a/tests/services/orders/order_store_service/test_pacbio_order_service.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from datetime import datetime
-
-from cg.constants import DataDelivery, Workflow
-from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
-from cg.models.orders.sample_base import SexEnum
-from cg.services.orders.store_order_services.store_pacbio_order_service import (
- StorePacBioOrderService,
-)
-from cg.store.models import Case, Sample
-from cg.store.store import Store
-
-
-def test_order_to_status(
- pacbio_order_to_submit: dict, store_pacbio_order_service: StorePacBioOrderService
-):
- """Test that a PacBio order is parsed correctly."""
- # GIVEN a PacBio order with two samples
- order = OrderIn.parse_obj(pacbio_order_to_submit, OrderType.PACBIO_LONG_READ)
-
- # WHEN parsing for status
- data = store_pacbio_order_service.order_to_status(order=order)
-
- # THEN it should pick out samples and relevant information
- assert len(data["samples"]) == 2
- first_sample = data["samples"][0]
- assert first_sample["name"] == "prov1"
- assert first_sample["application"] == "WGSPCFC060"
- assert first_sample["priority"] == "priority"
- assert first_sample["tumour"] is False
- assert first_sample["volume"] == "25"
-
- # THEN the other sample is a tumour
- assert data["samples"][1]["tumour"] is True
-
-
-def test_store_order(
- base_store: Store,
- pacbio_status_data: dict,
- ticket_id: str,
- store_pacbio_order_service: StorePacBioOrderService,
-):
- """Test that a PacBio order is stored in the database."""
- # GIVEN a basic store with no samples and a PacBio order
- assert not base_store._get_query(table=Sample).first()
- assert base_store._get_query(table=Case).count() == 0
-
- # WHEN storing the order
- new_samples: list[Sample] = store_pacbio_order_service._store_samples_in_statusdb(
- customer_id=pacbio_status_data["customer"],
- order=pacbio_status_data["order"],
- ordered=datetime.now(),
- ticket_id=ticket_id,
- samples=pacbio_status_data["samples"],
- )
-
- # THEN it should store the samples and create a case for each sample
- assert len(new_samples) == 2
- assert len(base_store._get_query(table=Sample).all()) == 2
- assert base_store._get_query(table=Case).count() == 2
- for new_sample in new_samples:
- assert len(new_sample.links) == 1
- case_link = new_sample.links[0]
- assert case_link.case in base_store.get_cases()
- assert case_link.case.data_analysis
- assert case_link.case.data_delivery in [DataDelivery.BAM, DataDelivery.NO_DELIVERY]
-
- # THEN the sample sex should be stored
- assert new_samples[0].sex == SexEnum.female
-
- # THEN the analysis for the case should be RAW_DATA
- assert new_samples[0].links[0].case.data_analysis == Workflow.RAW_DATA
diff --git a/tests/services/orders/order_store_service/test_pool_order_store_service.py b/tests/services/orders/order_store_service/test_pool_order_store_service.py
deleted file mode 100644
index f0b6a278c0..0000000000
--- a/tests/services/orders/order_store_service/test_pool_order_store_service.py
+++ /dev/null
@@ -1,86 +0,0 @@
-import datetime as dt
-
-from cg.constants import DataDelivery, Workflow
-from cg.models.orders.order import OrderIn, OrderType
-from cg.services.orders.store_order_services.store_pool_order import StorePoolOrderService
-from cg.store.models import Case, Pool, Sample
-from cg.store.store import Store
-
-
-def test_pools_to_status(
- rml_order_to_submit: dict, store_pool_order_service: StorePoolOrderService
-):
- # GIVEN a rml order with three samples in one pool
- order = OrderIn.parse_obj(rml_order_to_submit, OrderType.RML)
-
- # WHEN parsing for status
- data = store_pool_order_service.order_to_status(order=order)
-
- # THEN it should pick out the general information
- assert data["customer"] == "cust000"
- assert data["order"] == "#123456"
- assert data["comment"] == "order comment"
-
- # ... and information about the pool(s)
- assert len(data["pools"]) == 2
- pool = data["pools"][0]
- assert pool["name"] == "pool-1"
- assert pool["application"] == "RMLP05R800"
- assert pool["data_analysis"] == Workflow.RAW_DATA
- assert pool["data_delivery"] == str(DataDelivery.FASTQ)
- assert len(pool["samples"]) == 2
- sample = pool["samples"][0]
- assert sample["name"] == "sample1"
- assert sample["comment"] == "test comment"
- assert pool["priority"] == "research"
- assert sample["control"] == "negative"
-
-
-def test_store_rml(
- base_store: Store,
- rml_status_data: dict,
- ticket_id: str,
- store_pool_order_service: StorePoolOrderService,
-):
- # GIVEN a basic store with no samples and a rml order
- assert base_store._get_query(table=Pool).count() == 0
- assert base_store._get_query(table=Case).count() == 0
- assert not base_store._get_query(table=Sample).first()
-
- # WHEN storing the order
- new_pools = store_pool_order_service.store_items_in_status(
- customer_id=rml_status_data["customer"],
- order=rml_status_data["order"],
- ordered=dt.datetime.now(),
- ticket_id=ticket_id,
- items=rml_status_data["pools"],
- )
-
- # THEN it should update the database with new pools
- assert len(new_pools) == 2
-
- assert base_store._get_query(table=Pool).count() == base_store._get_query(table=Case).count()
- assert len(base_store._get_query(table=Sample).all()) == 4
-
- # ASSERT that there is one negative sample
- negative_samples = 0
- for sample in base_store._get_query(table=Sample).all():
- if sample.control == "negative":
- negative_samples += 1
- assert negative_samples == 1
-
- new_pool = base_store._get_query(table=Pool).order_by(Pool.created_at.desc()).first()
- assert new_pool == new_pools[1]
-
- assert new_pool.name == "pool-2"
- assert new_pool.application_version.application.tag == "RMLP05R800"
- assert not hasattr(new_pool, "data_analysis")
-
- new_case = base_store.get_cases()[0]
- assert new_case.data_analysis == Workflow.RAW_DATA
- assert new_case.data_delivery == str(DataDelivery.FASTQ)
-
- # and that the pool is set for invoicing but not the samples of the pool
- assert not new_pool.no_invoice
- for link in new_case.links:
- assert link.sample.no_invoice
diff --git a/tests/services/orders/store_service/test_fastq_order_service.py b/tests/services/orders/store_service/test_fastq_order_service.py
new file mode 100644
index 0000000000..d42b1cd186
--- /dev/null
+++ b/tests/services/orders/store_service/test_fastq_order_service.py
@@ -0,0 +1,145 @@
+"""
+Module to test the store_order_data_in_status_db method of the StoreFastqOrderService class.
+The function store_order_data_in_status_db is never expected to fail, as its input order should
+already have been validated before the function is called.
+"""
+
+from cg.constants import DataDelivery, Workflow
+from cg.constants.sequencing import SeqLibraryPrepCategory
+from cg.services.orders.storing.constants import MAF_ORDER_ID
+from cg.services.orders.storing.implementations.fastq_order_service import StoreFastqOrderService
+from cg.services.orders.validation.workflows.fastq.models.order import FastqOrder
+from cg.store.models import Application, Case, Order, Sample
+from cg.store.store import Store
+from tests.store_helpers import StoreHelpers
+
+
+def test_store_order_data_in_status_db(
+ store_to_submit_and_validate_orders: Store,
+ store_fastq_order_service: StoreFastqOrderService,
+ fastq_order: FastqOrder,
+ ticket_id_as_int: int,
+):
+ """Test that a Fastq order with two WGS samples, one being tumour, is stored in the database."""
+
+ # GIVEN a fastq order with two WGS samples, the first one being a tumour sample
+
+ # GIVEN a basic store with no samples nor cases
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert store_to_submit_and_validate_orders._get_query(table=Case).count() == 0
+
+ # WHEN storing the order
+ new_samples: list[Sample] = store_fastq_order_service.store_order_data_in_status_db(fastq_order)
+
+ # THEN it should store the order
+ assert store_to_submit_and_validate_orders.get_order_by_ticket_id(ticket_id_as_int)
+
+ # THEN it should store the samples
+ db_samples: list[Sample] = store_to_submit_and_validate_orders._get_query(table=Sample).all()
+ assert set(new_samples) == set(db_samples)
+
+ # THEN it should create one case for the analysis and one MAF case
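+    # (a MAF case is an extra MIP-DNA case created for non-tumour WGS samples; see the MAF tests below)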
+ cases: list[Case] = store_to_submit_and_validate_orders._get_query(table=Case).all()
+ assert len(cases) == 2
+ assert len(db_samples[0].links) == 2
+ assert cases[0].data_analysis == Workflow.MIP_DNA
+ assert cases[1].data_analysis == Workflow.RAW_DATA
+
+ # THEN the analysis case has allowed data deliveries
+ assert cases[1].data_delivery in [DataDelivery.FASTQ, DataDelivery.NO_DELIVERY]
+
+ # THEN the sample sex should be stored
+ assert db_samples[0].sex == "male"
+
+ # THEN the MAF order should have one case linked to the tumour negative sample
+ maf_order: Order = store_to_submit_and_validate_orders.get_order_by_id(MAF_ORDER_ID)
+ maf_cases: list[Case] = maf_order.cases
+ assert len(maf_cases) == 1
+ assert not maf_cases[0].samples[0].is_tumour
+
+
+def test_store_fastq_samples_non_tumour_wgs_to_mip_maf_case(
+ store_to_submit_and_validate_orders: Store,
+ fastq_order: FastqOrder,
+ store_fastq_order_service: StoreFastqOrderService,
+):
+ """Test that a non-tumour WGS sample creates a MAF case with MIP as data analysis."""
+ # GIVEN a basic store with no samples nor cases
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert store_to_submit_and_validate_orders._get_query(table=Case).count() == 0
+
+ # GIVEN a fastq order with the first sample being a non-tumour WGS sample
+ store_to_submit_and_validate_orders.get_application_by_tag(
+ fastq_order.samples[0].application
+ ).prep_category = SeqLibraryPrepCategory.WHOLE_GENOME_SEQUENCING
+ fastq_order.samples[0].tumour = False
+
+ # WHEN storing the order
+ new_samples = store_fastq_order_service.store_order_data_in_status_db(fastq_order)
+
+ # THEN a MAF case was created for the first sample
+ assert new_samples[0].links[0].case.data_analysis == Workflow.MIP_DNA
+
+ # THEN the case for the analysis is also created
+ assert new_samples[0].links[1].case.data_analysis == Workflow.RAW_DATA
+
+
+def test_store_fastq_samples_tumour_wgs_to_fastq_no_maf_case(
+ store_to_submit_and_validate_orders: Store,
+ fastq_order: FastqOrder,
+ store_fastq_order_service: StoreFastqOrderService,
+):
+ """Test that a tumour WGS sample does not create MAF cases."""
+ # GIVEN a basic store with no samples
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert store_to_submit_and_validate_orders._get_query(table=Case).count() == 0
+
+ # GIVEN a fastq order with the second sample being a tumour WGS sample
+ store_to_submit_and_validate_orders.get_application_by_tag(
+ fastq_order.samples[0].application
+ ).prep_category = SeqLibraryPrepCategory.WHOLE_GENOME_SEQUENCING
+ fastq_order.samples[1].tumour = True
+
+ # WHEN storing the order
+ new_samples = store_fastq_order_service.store_order_data_in_status_db(fastq_order)
+
+ # THEN only one case is linked to the second sample
+ assert len(new_samples[1].links) == 1
+
+ # THEN the data analysis for the case should be RAW_DATA
+ assert new_samples[1].links[0].case.data_analysis == Workflow.RAW_DATA
+
+
+def test_store_fastq_samples_non_wgs_no_maf_case(
+ store_to_submit_and_validate_orders: Store,
+ fastq_order: FastqOrder,
+ store_fastq_order_service: StoreFastqOrderService,
+ helpers: StoreHelpers,
+):
+ """Test that an order with non-WGS samples creates no MAF cases."""
+ # GIVEN a basic store with no samples
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert store_to_submit_and_validate_orders._get_query(table=Case).count() == 0
+
+ # GIVEN that the store has application versions for the non-WGS workflow
+ non_wgs_prep_category = SeqLibraryPrepCategory.WHOLE_EXOME_SEQUENCING
+ helpers.ensure_application_version(
+ store=store_to_submit_and_validate_orders, prep_category=non_wgs_prep_category
+ )
+
+    # GIVEN a fastq order with non-WGS samples
+    non_wgs_applications: list[Application] = (
+        store_to_submit_and_validate_orders._get_query(table=Application)
+        .filter(Application.prep_category == non_wgs_prep_category)
+        .all()
+    )
+    assert non_wgs_applications
+ for sample in fastq_order.samples:
+ sample.application = non_wgs_applications[0].tag
+
+ # WHEN storing the order
+ new_samples = store_fastq_order_service.store_order_data_in_status_db(fastq_order)
+
+ # THEN only one case is linked to the sample
+ assert len(new_samples[0].links) == 1
+
+ # THEN the data analysis for the case should be RAW_DATA
+ assert new_samples[0].links[0].case.data_analysis == Workflow.RAW_DATA
diff --git a/tests/services/orders/store_service/test_generic_order_store_service.py b/tests/services/orders/store_service/test_generic_order_store_service.py
new file mode 100644
index 0000000000..efd9742ad4
--- /dev/null
+++ b/tests/services/orders/store_service/test_generic_order_store_service.py
@@ -0,0 +1,169 @@
+"""
+Module to test the store_order_data_in_status_db method of the StoreGenericOrderService class.
+The function store_order_data_in_status_db is never expected to fail, as its input order should
+already have been validated before the function is called.
+"""
+
+from cg.constants import DataDelivery, Priority, Workflow
+from cg.services.orders.storing.implementations.case_order_service import StoreCaseOrderService
+from cg.services.orders.validation.workflows.balsamic.models.order import BalsamicOrder
+from cg.services.orders.validation.workflows.mip_dna.models.order import MipDnaOrder
+from cg.services.orders.validation.workflows.mip_rna.models.order import MipRnaOrder
+from cg.services.orders.validation.workflows.rna_fusion.models.order import RnaFusionOrder
+from cg.services.orders.validation.workflows.tomte.models.order import TomteOrder
+from cg.store.models import Case, Sample
+from cg.store.store import Store
+
+
+def test_store_mip_order(
+ store_to_submit_and_validate_orders: Store,
+ mip_dna_order: MipDnaOrder,
+ store_generic_order_service: StoreCaseOrderService,
+):
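+    """Test that a MIP-DNA order with two cases is stored in the database."""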
+ # GIVEN a basic store with no samples nor cases
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert not store_to_submit_and_validate_orders.get_cases()
+
+ # WHEN storing the order
+ new_cases: list[Case] = store_generic_order_service.store_order_data_in_status_db(mip_dna_order)
+
+ # THEN it should create and link samples and the case
+ assert len(new_cases) == 2
+ new_case = new_cases[0]
+ assert new_case.name == "MipCase1"
+ assert set(new_case.panels) == {"AID"}
+ assert new_case.priority_human == Priority.standard.name
+
+ assert len(new_case.links) == 3
+ new_link = new_case.links[2]
+ assert new_case.data_analysis == Workflow.MIP_DNA
+ assert new_case.data_delivery == str(DataDelivery.ANALYSIS_SCOUT)
+ assert (
+ new_case.synopsis
+ == "This is a long string to test the buffer length because surely this is the best way to do this and there are no better ways of doing this."
+ )
+ assert new_link.status == "affected"
+ assert new_link.father.name == "MipSample1"
+ assert new_link.mother.name == "MipSample2"
+ assert new_link.sample.name == "MipSample3"
+ assert new_link.sample.sex == "female"
+ assert new_link.sample.application_version.application.tag == "WGSPCFC030"
+ assert isinstance(new_case.links[1].sample.comment, str)
+
+ assert set(new_link.sample.phenotype_groups) == {"Phenotype-group"}
+ assert set(new_link.sample.phenotype_terms) == {"HP:0012747", "HP:0025049"}
+ assert new_link.sample.subject_id == "Subject3"
+
+
+def test_store_mip_rna_order(
+ store_to_submit_and_validate_orders: Store,
+ mip_rna_order: MipRnaOrder,
+ store_generic_order_service: StoreCaseOrderService,
+):
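+    """Test that a MIP-RNA order with two cases is stored in the database."""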
+ # GIVEN a basic store with no samples nor cases
+ rna_application_tag = "RNAPOAR025"
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert not store_to_submit_and_validate_orders.get_cases()
+ assert store_to_submit_and_validate_orders.get_application_by_tag(tag=rna_application_tag)
+
+ # WHEN storing a MIP-RNA order containing 1 case with 2 samples and 1 case with only 1 sample
+ new_cases: list[Case] = store_generic_order_service.store_order_data_in_status_db(mip_rna_order)
+
+    # THEN it should create and link samples and the cases
+ assert len(new_cases) == 2
+ first_case = new_cases[0]
+
+ assert len(first_case.links) == 2
+ new_link = first_case.links[0]
+ assert first_case.data_analysis == Workflow.MIP_RNA
+ assert first_case.data_delivery == str(DataDelivery.ANALYSIS_SCOUT)
+ assert new_link.sample.name == "MipRNASample1"
+ assert new_link.sample.application_version.application.tag == rna_application_tag
+
+
+def test_store_balsamic_order(
+ store_to_submit_and_validate_orders: Store,
+ balsamic_order: BalsamicOrder,
+ store_generic_order_service: StoreCaseOrderService,
+):
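+    """Test that a Balsamic order with a single tumour sample is stored in the database."""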
+ # GIVEN a Balsamic order
+
+ # GIVEN a store with no samples nor cases
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert not store_to_submit_and_validate_orders.get_cases()
+
+ # WHEN storing the order
+ new_cases: list[Case] = store_generic_order_service.store_order_data_in_status_db(
+ balsamic_order
+ )
+
+ # THEN it should create and link samples and the case
+ assert len(new_cases) == 1
+ new_case = new_cases[0]
+ assert new_case.name == "BalsamicCase"
+ assert new_case.data_analysis in [
+ Workflow.BALSAMIC,
+ Workflow.BALSAMIC_QC,
+ Workflow.BALSAMIC_UMI,
+ ]
+ assert new_case.data_delivery == str(DataDelivery.ANALYSIS_SCOUT)
+ assert not set(new_case.panels)
+ assert new_case.priority_human == Priority.standard.name
+
+ assert len(new_case.links) == 1
+ new_link = new_case.links[0]
+ assert new_link.sample.name == "BalsamicSample"
+ assert new_link.sample.sex == "male"
+ assert new_link.sample.application_version.application.tag == "PANKTTR100"
+ assert new_link.sample.comment == "This is a sample comment"
+ assert new_link.sample.is_tumour
+
+
+def test_store_rna_fusion_order(
+ store_to_submit_and_validate_orders: Store,
+ rnafusion_order: RnaFusionOrder,
+ store_generic_order_service: StoreCaseOrderService,
+):
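+    """Test that an RNA Fusion order with two cases is stored in the database."""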
+ # GIVEN a store with no samples nor cases
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert not store_to_submit_and_validate_orders.get_cases()
+
+    # WHEN storing an RNA Fusion order
+ new_cases = store_generic_order_service.store_order_data_in_status_db(rnafusion_order)
+
+    # THEN it should create and link samples and the cases
+ assert len(new_cases) == 2
+ first_case = new_cases[0]
+
+ assert len(first_case.links) == 1
+ new_link = first_case.links[0]
+ assert first_case.data_analysis == Workflow.RNAFUSION
+ assert first_case.data_delivery == str(DataDelivery.FASTQ_ANALYSIS)
+ assert new_link.sample.name == "sample1-rna-t1"
+ assert new_link.sample.application_version.application.tag == "RNAPOAR025"
+ assert new_link
+
+
+def test_store_tomte_order(
+ store_to_submit_and_validate_orders: Store,
+ tomte_order: TomteOrder,
+ store_generic_order_service: StoreCaseOrderService,
+):
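+    """Test that a Tomte order with one case and four samples is stored in the database."""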
+ # GIVEN a store with no samples nor cases
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert not store_to_submit_and_validate_orders.get_cases()
+
+ # WHEN storing a Tomte order
+ new_cases = store_generic_order_service.store_order_data_in_status_db(tomte_order)
+
+    # THEN it should create and link samples and the case
+ assert len(new_cases) == 1
+ first_case = new_cases[0]
+
+ assert len(first_case.links) == 4
+ new_link = first_case.links[0]
+ assert first_case.data_analysis == Workflow.TOMTE
+ assert first_case.data_delivery == str(DataDelivery.FASTQ_ANALYSIS)
+ assert new_link.sample.name == "sample1"
+ assert new_link.sample.application_version.application.tag == "RNAPOAR025"
+ assert new_link
diff --git a/tests/services/orders/store_service/test_metagenome_store_service.py b/tests/services/orders/store_service/test_metagenome_store_service.py
new file mode 100644
index 0000000000..9da88631e4
--- /dev/null
+++ b/tests/services/orders/store_service/test_metagenome_store_service.py
@@ -0,0 +1,49 @@
+"""
+Module to test the store_order_data_in_status_db method of the StoreMetagenomeOrderService class.
+The function store_order_data_in_status_db is never expected to fail, as its input order should
+already have been validated before the function is called.
+"""
+
+import pytest
+
+from cg.services.orders.storing.implementations.metagenome_order_service import (
+ StoreMetagenomeOrderService,
+)
+from cg.services.orders.validation.workflows.metagenome.models.order import MetagenomeOrder
+from cg.services.orders.validation.workflows.taxprofiler.models.order import TaxprofilerOrder
+from cg.store.models import Sample
+from cg.store.store import Store
+
+
+@pytest.mark.parametrize(
+ "order_fixture",
+ ["metagenome_order", "taxprofiler_order"],
+ ids=["Metagenome", "Taxprofiler"],
+)
+def test_store_metagenome_order_data_in_status_db(
+ order_fixture: str,
+ store_metagenome_order_service: StoreMetagenomeOrderService,
+ store_to_submit_and_validate_orders: Store,
+ ticket_id_as_int: int,
+ request: pytest.FixtureRequest,
+):
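+    """Test that a Metagenome or Taxprofiler order is stored in the database."""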
+ # GIVEN an order
+ order: MetagenomeOrder | TaxprofilerOrder = request.getfixturevalue(order_fixture)
+
+ # GIVEN a store with no samples nor cases
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert not store_to_submit_and_validate_orders.get_cases()
+
+ # WHEN storing the order
+ new_samples: list[Sample] = store_metagenome_order_service.store_order_data_in_status_db(order)
+
+ # THEN the samples should have been stored
+ db_samples: list[Sample] = store_to_submit_and_validate_orders._get_query(table=Sample).all()
+ assert set(new_samples) == set(db_samples)
+
+ # THEN the samples should have the correct application tag
+ for sample in db_samples:
+ assert sample.application_version.application.tag in ["METWPFR030", "METPCFR030"]
+
+ # THEN the order should be stored
+ assert store_to_submit_and_validate_orders.get_order_by_ticket_id(ticket_id_as_int)
diff --git a/tests/services/orders/store_service/test_microbial_fastq_order_store_service.py b/tests/services/orders/store_service/test_microbial_fastq_order_store_service.py
new file mode 100644
index 0000000000..a8d1ee403d
--- /dev/null
+++ b/tests/services/orders/store_service/test_microbial_fastq_order_store_service.py
@@ -0,0 +1,35 @@
+from cg.constants import DataDelivery
+from cg.services.orders.storing.implementations.microbial_fastq_order_service import (
+ StoreMicrobialFastqOrderService,
+)
+from cg.services.orders.validation.workflows.microbial_fastq.models.order import MicrobialFastqOrder
+from cg.store.models import Case, Sample
+from cg.store.store import Store
+
+
+def test_store_samples(
+ microbial_fastq_order: MicrobialFastqOrder,
+ store_microbial_fastq_order_service: StoreMicrobialFastqOrderService,
+):
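+    """Test that a microbial fastq order is stored with one case created per sample."""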
+    # GIVEN a microbial fastq order with two samples
+
+    # GIVEN a basic store with no samples or cases
+ store: Store = store_microbial_fastq_order_service.status_db
+ assert not store._get_query(table=Sample).first()
+ assert store._get_query(table=Case).count() == 0
+
+ # WHEN storing the order
+ new_samples = store_microbial_fastq_order_service.store_order_data_in_status_db(
+ order=microbial_fastq_order
+ )
+
+ # THEN it should store the samples and create a case for each sample
+ assert len(new_samples) == 2
+ assert len(store._get_query(table=Sample).all()) == 2
+ assert store._get_query(table=Case).count() == 2
+ first_sample = new_samples[0]
+ assert len(first_sample.links) == 1
+ case_link = first_sample.links[0]
+ assert case_link.case in store.get_cases()
+ assert case_link.case.data_analysis
+ assert case_link.case.data_delivery == DataDelivery.FASTQ
diff --git a/tests/services/orders/store_service/test_microbial_store_order_service.py b/tests/services/orders/store_service/test_microbial_store_order_service.py
new file mode 100644
index 0000000000..2052cf7165
--- /dev/null
+++ b/tests/services/orders/store_service/test_microbial_store_order_service.py
@@ -0,0 +1,105 @@
+"""
+Module to test the store_order_data_in_status_db method of the StoreMicrobialOrderService class.
+The function store_order_data_in_status_db is never expected to fail, as its input order should
+already have been validated before the function is called.
+"""
+
+from cg.constants import DataDelivery
+from cg.constants.constants import Workflow
+from cg.models.orders.sample_base import ControlEnum
+from cg.services.orders.storing.implementations.microbial_order_service import (
+ StoreMicrobialOrderService,
+)
+from cg.services.orders.validation.workflows.microsalt.models.order import MicrosaltOrder
+from cg.services.orders.validation.workflows.mutant.models.order import MutantOrder
+from cg.store.models import Case, Organism, Sample
+from cg.store.store import Store
+
+
+def test_store_microsalt_order_data_in_status_db(
+ store_to_submit_and_validate_orders: Store,
+ microsalt_order: MicrosaltOrder,
+ store_microbial_order_service: StoreMicrobialOrderService,
+):
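+    """Test that a Microsalt order is stored with its samples, case and organisms."""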
+ # GIVEN a store with no samples nor cases
+ assert store_to_submit_and_validate_orders._get_query(table=Sample).count() == 0
+ assert not store_to_submit_and_validate_orders.get_cases()
+
+ # GIVEN that the store has no organisms
+ assert store_to_submit_and_validate_orders.get_all_organisms().count() == 0
+
+ # WHEN storing the order
+ new_samples: list[Sample] = store_microbial_order_service.store_order_data_in_status_db(
+ microsalt_order
+ )
+
+ # THEN it should store the samples under a case
+ db_samples: list[Sample] = store_to_submit_and_validate_orders._get_query(table=Sample).all()
+ assert set(new_samples) == set(db_samples)
+ case_from_sample: Case = db_samples[0].links[0].case
+ db_case: Case = store_to_submit_and_validate_orders.get_cases()[0]
+ assert db_case == case_from_sample
+
+ # THEN it should store the organisms
+ assert store_to_submit_and_validate_orders.get_all_organisms().count() > 0
+
+ # THEN the case should have the correct data analysis and data delivery
+ assert db_case.data_analysis == Workflow.MICROSALT
+ assert db_case.data_delivery == str(DataDelivery.FASTQ_QC_ANALYSIS)
+
+
+def test_store_microbial_new_organism_in_status_db(
+ store_to_submit_and_validate_orders: Store,
+ microsalt_order: MicrosaltOrder,
+ store_microbial_order_service: StoreMicrobialOrderService,
+):
+ """Test that a new organism in a Microsalt order is stored in the status db."""
+ # GIVEN a store with no organisms
+ assert store_to_submit_and_validate_orders.get_all_organisms().count() == 0
+
+ # GIVEN a Microsalt order with a new organism
+ microsalt_order.samples[0].organism = "Canis lupus familiaris"
+ microsalt_order.samples[0].reference_genome = "UU_Cfam_GSD_1.0"
+
+ # WHEN storing the order
+ store_microbial_order_service.store_order_data_in_status_db(microsalt_order)
+
+ # THEN the organism should be stored in the status db
+ organisms: list[Organism] = store_to_submit_and_validate_orders.get_all_organisms().all()
+ dog: Organism = [
+ organism for organism in organisms if organism.name == "Canis lupus familiaris"
+ ][0]
+ assert dog.reference_genome == "UU_Cfam_GSD_1.0"
+
+ # THEN the organism should not be verified
+ assert not dog.verified
+
+
+def test_store_mutant_order_data_control_has_stored_value(
+ mutant_order: MutantOrder,
+ store_to_submit_and_validate_orders: Store,
+ store_microbial_order_service: StoreMicrobialOrderService,
+):
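+    """Test that the control values of a Mutant order are stored on the samples."""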
+ # GIVEN a Mutant order with one positive and one negative control
+
+ # GIVEN a store with no samples nor cases
+ assert store_to_submit_and_validate_orders._get_query(table=Sample).count() == 0
+ assert not store_to_submit_and_validate_orders.get_cases()
+
+ # WHEN storing the order
+ new_samples: list[Sample] = store_microbial_order_service.store_order_data_in_status_db(
+ mutant_order
+ )
+
+ # THEN it should store the samples under a case
+ db_samples: list[Sample] = store_to_submit_and_validate_orders._get_query(table=Sample).all()
+ assert set(new_samples) == set(db_samples)
+ case_from_sample: Case = db_samples[0].links[0].case
+ db_case: Case = store_to_submit_and_validate_orders.get_cases()[0]
+ assert db_case == case_from_sample
+
+ # THEN the control samples should have the correct control value
+ positive: Sample = store_to_submit_and_validate_orders.get_sample_by_name("control-positive")
+ assert positive.control == ControlEnum.positive
+ negative: Sample = store_to_submit_and_validate_orders.get_sample_by_name("control-negative")
+ assert negative.control == ControlEnum.negative
diff --git a/tests/services/orders/store_service/test_pacbio_order_service.py b/tests/services/orders/store_service/test_pacbio_order_service.py
new file mode 100644
index 0000000000..312d8ce3ee
--- /dev/null
+++ b/tests/services/orders/store_service/test_pacbio_order_service.py
@@ -0,0 +1,44 @@
+from cg.constants import DataDelivery, Workflow
+from cg.models.orders.sample_base import SexEnum
+from cg.services.orders.storing.implementations.pacbio_order_service import StorePacBioOrderService
+from cg.services.orders.validation.workflows.pacbio_long_read.models.order import PacbioOrder
+from cg.store.models import Case, Order, Sample
+from cg.store.store import Store
+
+
+def test_store_pacbio_order_data_in_status_db(
+ store_to_submit_and_validate_orders: Store,
+ pacbio_order: PacbioOrder,
+ store_pacbio_order_service: StorePacBioOrderService,
+):
+ """Test that a PacBio order is stored in the database."""
+ # GIVEN a valid Pacbio order and a Pacbio store service
+
+    # GIVEN a basic store with no samples or cases and only the MAF order
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert store_to_submit_and_validate_orders._get_query(table=Case).count() == 0
+ assert store_to_submit_and_validate_orders._get_query(table=Order).count() == 1
+
+ # WHEN storing the order
+ new_samples: list[Sample] = store_pacbio_order_service.store_order_data_in_status_db(
+ order=pacbio_order
+ )
+
+ # THEN it should store the order
+ assert store_to_submit_and_validate_orders._get_query(table=Order).count() == 2
+
+ # THEN it should store the samples and create a case for each sample
+ assert len(new_samples) == 3
+ assert len(store_to_submit_and_validate_orders._get_query(table=Sample).all()) == 3
+ assert store_to_submit_and_validate_orders._get_query(table=Case).count() == 3
+ for new_sample in new_samples:
+ # THEN the sample sex should be stored
+ assert new_sample.sex == SexEnum.male
+ # THEN the sample should have a relationship with a case
+ assert len(new_sample.links) == 1
+ case_link = new_sample.links[0]
+ assert case_link.case in store_to_submit_and_validate_orders.get_cases()
+ # THEN the analysis for the case should be RAW_DATA
+ assert case_link.case.data_analysis == Workflow.RAW_DATA
+ # THEN the delivery type for the case should be BAM or NO_DELIVERY
+ assert case_link.case.data_delivery in [DataDelivery.BAM, DataDelivery.NO_DELIVERY]
diff --git a/tests/services/orders/store_service/test_pool_order_store_service.py b/tests/services/orders/store_service/test_pool_order_store_service.py
new file mode 100644
index 0000000000..42a8da0c74
--- /dev/null
+++ b/tests/services/orders/store_service/test_pool_order_store_service.py
@@ -0,0 +1,73 @@
+"""
+Module to test the store_order_data_in_status_db method of the StorePoolOrderService class.
+The function store_order_data_in_status_db is never expected to fail, as its input order should
+already have been validated before the function is called.
+"""
+
+import pytest
+
+from cg.constants import Workflow
+from cg.services.orders.storing.implementations.pool_order_service import StorePoolOrderService
+from cg.services.orders.validation.models.order_aliases import OrderWithIndexedSamples
+from cg.store.models import Case, CaseSample, Pool, Sample
+from cg.store.store import Store
+
+
+@pytest.mark.parametrize(
+ "order_fixture, workflow",
+ [("rml_order", Workflow.RAW_DATA), ("fluffy_order", Workflow.FLUFFY)],
+ ids=["RML", "Fluffy"],
+)
+def test_store_pool_order_data_in_status_db(
+ store_to_submit_and_validate_orders: Store,
+ order_fixture: str,
+ ticket_id: str,
+ store_pool_order_service: StorePoolOrderService,
+ workflow: Workflow,
+ request: pytest.FixtureRequest,
+):
+ """Test that a Fluffy or RML order is stored in the database."""
+ # GIVEN a valid Fluffy or RML order
+ order: OrderWithIndexedSamples = request.getfixturevalue(order_fixture)
+
+ # GIVEN a store with no samples, pools, nor cases
+ assert store_to_submit_and_validate_orders._get_query(table=Sample).count() == 0
+ assert store_to_submit_and_validate_orders._get_query(table=Pool).count() == 0
+ assert store_to_submit_and_validate_orders._get_query(table=Case).count() == 0
+ assert store_to_submit_and_validate_orders._get_query(table=CaseSample).count() == 0
+
+ # WHEN storing the order
+ new_pools: list[Pool] = store_pool_order_service.store_order_data_in_status_db(order=order)
+
+ # THEN it should return the pools
+ assert len(new_pools) == 4
+ assert isinstance(new_pools[0], Pool)
+
+ # THEN the pools should be stored in the database
+ db_pools: list[Pool] = store_to_submit_and_validate_orders._get_query(table=Pool).all()
+ assert len(db_pools) == 4
+ assert set(new_pools) == set(db_pools)
+
+    # THEN the database pools should be invoiced, have an RML application and the correct ticket id
+ for pool in db_pools:
+ assert not pool.no_invoice
+ assert pool.application_version.application.tag.startswith("RML")
+ assert pool.ticket == ticket_id
+
+    # THEN the order should be stored with the correct ticket id
+ assert store_to_submit_and_validate_orders.get_order_by_ticket_id(int(ticket_id))
+
+ # THEN it should store the samples and create a case for each sample
+ new_samples: list[Sample] = store_to_submit_and_validate_orders._get_query(table=Sample).all()
+ new_cases: list[Case] = store_to_submit_and_validate_orders._get_query(table=Case).all()
+ assert len(new_samples) == 4
+ assert len(new_cases) == 4
+ assert store_to_submit_and_validate_orders._get_query(table=CaseSample).count() == 4
+
+ # THEN the samples are not set for invoicing
+ for sample in new_samples:
+ assert sample.no_invoice
+
+ # THEN the cases should have the correct data analysis
+ for case in new_cases:
+ assert case.data_analysis == workflow
diff --git a/tests/services/orders/store_service/test_registry.py b/tests/services/orders/store_service/test_registry.py
new file mode 100644
index 0000000000..a762a874de
--- /dev/null
+++ b/tests/services/orders/store_service/test_registry.py
@@ -0,0 +1,70 @@
+import pytest
+
+from cg.models.orders.constants import OrderType
+from cg.services.orders.storing.service import StoreOrderService
+from cg.services.orders.storing.service_registry import StoringServiceRegistry
+
+
+@pytest.mark.parametrize(
+ "order_type, storing_service_fixture",
+ [
+ (OrderType.BALSAMIC, "store_generic_order_service"),
+ (OrderType.BALSAMIC_QC, "store_generic_order_service"),
+ (OrderType.FASTQ, "store_fastq_order_service"),
+ (OrderType.FLUFFY, "store_pool_order_service"),
+ (OrderType.METAGENOME, "store_metagenome_order_service"),
+ (OrderType.MICROBIAL_FASTQ, "store_microbial_fastq_order_service"),
+ (OrderType.MICROSALT, "store_microbial_order_service"),
+ (OrderType.MIP_DNA, "store_generic_order_service"),
+ (OrderType.MIP_RNA, "store_generic_order_service"),
+ (OrderType.PACBIO_LONG_READ, "store_pacbio_order_service"),
+ (OrderType.RML, "store_pool_order_service"),
+ (OrderType.RNAFUSION, "store_generic_order_service"),
+ (OrderType.SARS_COV_2, "store_microbial_order_service"),
+ (OrderType.TAXPROFILER, "store_metagenome_order_service"),
+ (OrderType.TOMTE, "store_generic_order_service"),
+ ],
+ ids=[
+ "balsamic",
+ "balsamic_qc",
+ "fastq",
+ "fluffy",
+ "metagenome",
+ "microbial_fastq",
+ "microbial",
+ "mip_dna",
+ "mip_rna",
+ "pacbio_long_read",
+ "rml",
+ "rnafusion",
+ "sars_cov_2",
+ "taxprofiler",
+ "tomte",
+ ],
+)
+def test_get_storing_service(
+ storing_service_registry: StoringServiceRegistry,
+ order_type: OrderType,
+ storing_service_fixture: str,
+ request: pytest.FixtureRequest,
+):
+ """Test that getting a storing service returns the correct service for any known order type."""
+ # GIVEN a storing service registry
+
+ # WHEN getting a storing service for a known order type
+ storing_service: StoreOrderService = storing_service_registry.get_storing_service(order_type)
+
+ # THEN the correct storing service should be returned
+ expected_storing_service: StoreOrderService = request.getfixturevalue(storing_service_fixture)
+ assert isinstance(storing_service, type(expected_storing_service))
+
+
+def test_get_storing_registry_unknown_order_type(storing_service_registry: StoringServiceRegistry):
+ """Test that getting a storing service for an unknown order type raises a ValueError."""
+ # GIVEN a storing service registry
+
+ # WHEN getting a storing service for an unknown order type
+
+ # THEN it should raise a ValueError
+ with pytest.raises(ValueError):
+ storing_service_registry.get_storing_service(order_type="non_existing_order_type")
diff --git a/tests/services/orders/submitter/test_order_submitter.py b/tests/services/orders/submitter/test_order_submitter.py
new file mode 100644
index 0000000000..57edb4458c
--- /dev/null
+++ b/tests/services/orders/submitter/test_order_submitter.py
@@ -0,0 +1,182 @@
+import datetime as dt
+from unittest.mock import patch
+
+import pytest
+
+from cg.clients.freshdesk.models import TicketResponse
+from cg.exc import TicketCreationError
+from cg.models.orders.constants import OrderType
+from cg.services.orders.constants import ORDER_TYPE_WORKFLOW_MAP
+from cg.services.orders.storing.constants import MAF_ORDER_ID
+from cg.services.orders.submitter.service import OrderSubmitter
+from cg.services.orders.validation.errors.validation_errors import ValidationErrors
+from cg.services.orders.validation.models.order import Order
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.models.order_with_samples import OrderWithSamples
+from cg.services.orders.validation.workflows.mip_dna.models.order import MipDnaOrder
+from cg.store.models import Case
+from cg.store.models import Order as DbOrder
+from cg.store.models import Pool, Sample, User
+from cg.store.store import Store
+
+
+def monkeypatch_process_lims(monkeypatch: pytest.MonkeyPatch, order: Order) -> None:
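+    """Patch OrderLimsService.process_lims to return canned project data and a sample-name-to-LIMS-id map."""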
+ lims_project_data = {"id": "ADM1234", "date": dt.datetime.now()}
+ if isinstance(order, OrderWithSamples):
+ lims_map = {sample.name: f"ELH123A{index}" for index, sample in enumerate(order.samples)}
+ elif isinstance(order, OrderWithCases):
+ lims_map = {
+ sample.name: f"ELH123A{case_index}-{sample_index}"
+ for case_index, sample_index, sample in order.enumerated_new_samples
+ }
+ monkeypatch.setattr(
+ "cg.services.orders.lims_service.service.OrderLimsService.process_lims",
+ lambda *args, **kwargs: (lims_project_data, lims_map),
+ )
+
+
+def mock_freshdesk_ticket_creation(mock_create_ticket: callable, ticket_id: str):
+ """Helper function to mock Freshdesk ticket creation."""
+ mock_create_ticket.return_value = TicketResponse(
+ id=int(ticket_id),
+ description="This is a test description.",
+ subject="Support needed..",
+ status=2,
+ priority=1,
+ )
+
+
+def mock_freshdesk_reply_to_ticket(mock_reply_to_ticket: callable):
+ """Helper function to mock Freshdesk reply to ticket."""
+ mock_reply_to_ticket.return_value = None
+
+
+@pytest.mark.parametrize(
+ "order_type, order_fixture",
+ [
+ (OrderType.BALSAMIC, "balsamic_order"),
+ (OrderType.FASTQ, "fastq_order"),
+ (OrderType.FLUFFY, "fluffy_order"),
+ (OrderType.METAGENOME, "metagenome_order"),
+ (OrderType.MICROBIAL_FASTQ, "microbial_fastq_order"),
+ (OrderType.MICROSALT, "microsalt_order"),
+ (OrderType.MIP_DNA, "mip_dna_order"),
+ (OrderType.MIP_RNA, "mip_rna_order"),
+ (OrderType.PACBIO_LONG_READ, "pacbio_order"),
+ (OrderType.RML, "rml_order"),
+ (OrderType.RNAFUSION, "rnafusion_order"),
+ (OrderType.SARS_COV_2, "mutant_order"),
+ (OrderType.TAXPROFILER, "taxprofiler_order"),
+ (OrderType.TOMTE, "tomte_order"),
+ ],
+)
+def test_submit_order(
+ store_to_submit_and_validate_orders: Store,
+ monkeypatch: pytest.MonkeyPatch,
+ order_type: OrderType,
+ order_fixture: str,
+ order_submitter: OrderSubmitter,
+ ticket_id: str,
+ customer_id: str,
+ request: pytest.FixtureRequest,
+):
+ """Test submitting a valid order of each ordertype."""
+ # GIVEN an order
+ order: Order = request.getfixturevalue(order_fixture)
+
+ # GIVEN a store without samples, cases, or pools
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+ assert not store_to_submit_and_validate_orders._get_query(table=Case).first()
+ assert not store_to_submit_and_validate_orders._get_query(table=Pool).first()
+
+ # GIVEN that the only order in store is a MAF order
+ orders: list[DbOrder] = store_to_submit_and_validate_orders._get_query(table=DbOrder).all()
+ assert len(orders) == 1
+ assert orders[0].id == MAF_ORDER_ID
+
+ # GIVEN a ticketing system that returns a ticket number
+ with (
+ patch(
+ "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.create_ticket"
+ ) as mock_create_ticket,
+ patch(
+ "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.reply_to_ticket"
+ ) as mock_reply_to_ticket,
+ ):
+ mock_freshdesk_ticket_creation(mock_create_ticket=mock_create_ticket, ticket_id=ticket_id)
+ mock_freshdesk_reply_to_ticket(mock_reply_to_ticket)
+
+ # GIVEN a mock LIMS that returns project data and sample name mapping
+ monkeypatch_process_lims(monkeypatch=monkeypatch, order=order)
+
+ # GIVEN a registered user
+ user: User = store_to_submit_and_validate_orders._get_query(table=User).first()
+
+ # GIVEN the dict representation of the order and a store without samples
+ raw_order = order.model_dump(by_alias=True)
+ assert not store_to_submit_and_validate_orders._get_query(table=Sample).first()
+
+ # WHEN submitting the order
+ result = order_submitter.submit(order_type=order_type, raw_order=raw_order, user=user)
+
+ # THEN the result should contain the project data
+ assert result["project"]["id"] == "ADM1234"
+
+ # THEN the records should contain the appropriate ticket id, customer id and data analysis
+ is_pool_order: bool = False
+ for record in result["records"]:
+ assert record.customer.internal_id == customer_id
+ if isinstance(record, Pool):
+ assert record.ticket == ticket_id
+ is_pool_order = True
+ elif isinstance(record, Sample):
+ assert record.original_ticket == ticket_id
+ elif isinstance(record, Case):
+ assert record.data_analysis == ORDER_TYPE_WORKFLOW_MAP[order_type]
+ for link_obj in record.links:
+ assert link_obj.sample.original_ticket == ticket_id
+
+ # THEN the order should be stored in the database
+ assert store_to_submit_and_validate_orders.get_order_by_ticket_id(ticket_id=int(ticket_id))
+
+ # THEN the samples should be stored in the database
+ assert store_to_submit_and_validate_orders._get_query(table=Sample).first()
+
+ # THEN the cases should be stored in the database
+ assert store_to_submit_and_validate_orders._get_query(table=Case).first()
+
+ # THEN the pools should be stored in the database if applicable
+ if is_pool_order:
+ assert store_to_submit_and_validate_orders._get_query(table=Pool).first()
+
+
+def test_submit_ticketexception(
+ order_submitter: OrderSubmitter,
+ mip_dna_order: MipDnaOrder,
+):
+
+ # GIVEN an order
+ raw_order = mip_dna_order.model_dump()
+ raw_order["project_type"] = mip_dna_order.order_type
+
+ # GIVEN a registered user
+ user: User = order_submitter.validation_service.store._get_query(table=User).first()
+
+ # GIVEN a mock Freshdesk ticket creation that raises TicketCreationError
+ with (
+ patch(
+ "cg.clients.freshdesk.freshdesk_client.FreshdeskClient.create_ticket",
+ side_effect=TicketCreationError("ERROR"),
+ ),
+ patch(
+ "cg.services.orders.validation.service.OrderValidationService._get_rule_validation_errors",
+ return_value=ValidationErrors(),
+ ),
+ ):
+
+        # WHEN the order is submitted and a TicketCreationError is raised
+        # THEN the TicketCreationError should propagate instead of being caught
+ with pytest.raises(TicketCreationError):
+ order_submitter.submit(
+ raw_order=raw_order, user=user, order_type=mip_dna_order.order_type
+ )
diff --git a/tests/meta/orders/test_ticket_handler.py b/tests/services/orders/submitter/test_ticket_handler.py
similarity index 66%
rename from tests/meta/orders/test_ticket_handler.py
rename to tests/services/orders/submitter/test_ticket_handler.py
index ef23284aed..0025e970b8 100644
--- a/tests/meta/orders/test_ticket_handler.py
+++ b/tests/services/orders/submitter/test_ticket_handler.py
@@ -1,15 +1,4 @@
-from cg.meta.orders.ticket_handler import TicketHandler
-
-
-def test_parse_ticket_number(ticket_id: str):
- # GIVEN a string with a ticket number
- order_name = f"#{ticket_id}"
-
- # WHEN parsing the string
- result = TicketHandler.parse_ticket_number(order_name)
-
- # THEN assert that the correct string was parsed
- assert result == ticket_id
+from cg.services.orders.submitter.ticket_handler import TicketHandler
def test_add_user_name_message(ticket_handler: TicketHandler):
diff --git a/tests/services/orders/order_status_service/conftest.py b/tests/services/orders/summary_service/conftest.py
similarity index 100%
rename from tests/services/orders/order_status_service/conftest.py
rename to tests/services/orders/summary_service/conftest.py
diff --git a/tests/services/orders/order_status_service/test_order_summary_service.py b/tests/services/orders/summary_service/test_order_summary_service.py
similarity index 100%
rename from tests/services/orders/order_status_service/test_order_summary_service.py
rename to tests/services/orders/summary_service/test_order_summary_service.py
diff --git a/tests/services/orders/test_validate_order_service/conftest.py b/tests/services/orders/test_validate_order_service/conftest.py
deleted file mode 100644
index 27d2ae9d19..0000000000
--- a/tests/services/orders/test_validate_order_service/conftest.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import pytest
-
-from cg.constants import DataDelivery, Workflow
-from cg.models.orders.order import OrderIn
-from cg.models.orders.sample_base import SexEnum
-from cg.models.orders.samples import PacBioSample
-from cg.services.orders.validate_order_services.validate_pacbio_order import (
- ValidatePacbioOrderService,
-)
-from cg.store.store import Store
-
-
-@pytest.fixture
-def pacbio_sample() -> PacBioSample:
- return PacBioSample(
- application="WGSPCFC060",
- data_analysis=Workflow.RAW_DATA,
- data_delivery=DataDelivery.NO_DELIVERY,
- name="PacbioSample",
- sex=SexEnum.unknown,
- tumour=False,
- volume="50",
- buffer="buffer",
- source="source",
- subject_id="subject_id",
- container="Tube",
- )
-
-
-@pytest.fixture
-def pacbio_order(pacbio_sample: PacBioSample) -> OrderIn:
- return OrderIn(
- customer="cust000",
- name="PacbioOrder",
- samples=[pacbio_sample],
- )
-
-
-@pytest.fixture
-def validate_pacbio_order_service(sample_store: Store) -> ValidatePacbioOrderService:
- return ValidatePacbioOrderService(sample_store)
diff --git a/tests/services/orders/test_validate_order_service/test_validate_generic_order.py b/tests/services/orders/test_validate_order_service/test_validate_generic_order.py
deleted file mode 100644
index f74f5842d0..0000000000
--- a/tests/services/orders/test_validate_order_service/test_validate_generic_order.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import pytest
-
-from cg.exc import OrderError
-from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
-from cg.services.orders.validate_order_services.validate_case_order import (
- ValidateCaseOrderService,
-)
-from cg.store.store import Store
-
-
-def test__validate_one_sample_per_case_multiple_samples(
- base_store: Store,
- rnafusion_order_to_submit: dict,
-):
- """Tests the validation of an RNAFUSION order where two samples have the same family_name."""
- ### GIVEN an RNAFUSION order where the first and last sample share the same case
- order_data = OrderIn.parse_obj(obj=rnafusion_order_to_submit, project=OrderType.RNAFUSION)
- order_data.samples[-1].family_name = order_data.samples[0].family_name
- validator = ValidateCaseOrderService(base_store)
-
- ### WHEN validating that each case has only one sample
- ### THEN an OrderError should be raised
-
- with pytest.raises(OrderError):
- validator._validate_only_one_sample_per_case(order_data.samples)
-
-
-def test__validate_one_sample_per_case_unique_samples(
- base_store: Store,
- rnafusion_order_to_submit: dict,
-):
- """Tests the validation of an RNAFUSION order where all samples have unique family_name."""
- ### GIVEN an RNAFUSION order with unique family names
- order_data: OrderIn = OrderIn.parse_obj(
- obj=rnafusion_order_to_submit, project=OrderType.RNAFUSION
- )
- validator = ValidateCaseOrderService(base_store)
-
- ### WHEN validating that each case has only one sample
- validator._validate_only_one_sample_per_case(order_data.samples)
-
- ### THEN no errors should be raised
diff --git a/tests/services/orders/test_validate_order_service/test_validate_microbial_order_service.py b/tests/services/orders/test_validate_order_service/test_validate_microbial_order_service.py
deleted file mode 100644
index ac1ee2d893..0000000000
--- a/tests/services/orders/test_validate_order_service/test_validate_microbial_order_service.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import pytest
-
-from cg.exc import OrderError
-
-from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
-from cg.models.orders.sample_base import ControlEnum
-from cg.models.orders.samples import SarsCov2Sample
-from cg.services.orders.validate_order_services.validate_microbial_order import (
- ValidateMicrobialOrderService,
-)
-from cg.store.store import Store
-from tests.store_helpers import StoreHelpers
-
-
-def test_validate_normal_order(sarscov2_order_to_submit: dict, base_store: Store):
- # GIVEN sarscov2 order with three samples, none in the database
- order = OrderIn.parse_obj(sarscov2_order_to_submit, OrderType.SARS_COV_2)
-
- # WHEN validating the order
- ValidateMicrobialOrderService(base_store).validate_order(order=order)
- # THEN it should be regarded as valid
-
-
-def test_validate_submitted_order(
- sarscov2_order_to_submit: dict, base_store: Store, helpers: StoreHelpers
-):
- # GIVEN sarscov2 order with three samples, all in the database
- order: OrderIn = OrderIn.parse_obj(sarscov2_order_to_submit, OrderType.SARS_COV_2)
-
- sample: SarsCov2Sample
- for sample in order.samples:
- helpers.add_sample(store=base_store, customer_id=order.customer, name=sample.name)
-
- # WHEN validating the order
- # THEN it should be regarded as invalid
- with pytest.raises(OrderError):
- ValidateMicrobialOrderService(base_store).validate_order(order=order)
-
-
-def test_validate_submitted_control_order(
- sarscov2_order_to_submit: dict, base_store: Store, helpers: StoreHelpers
-):
- # GIVEN sarscov2 order with three control samples, all in the database
- order: OrderIn = OrderIn.parse_obj(sarscov2_order_to_submit, OrderType.SARS_COV_2)
-
- sample: SarsCov2Sample
- for sample in order.samples:
- helpers.add_sample(store=base_store, customer_id=order.customer, name=sample.name)
- sample.control = ControlEnum.positive
-
- # WHEN validating the order
- # THEN it should be regarded as valid
- ValidateMicrobialOrderService(base_store).validate_order(order=order)
-
-
-def test_validate_microbial_fast_order(microbial_fastq_order_to_submit: dict, base_store: Store):
- # GIVEN a microbial order with three samples, none in the database
-
- # WHEN validating the order
- order = OrderIn.parse_obj(microbial_fastq_order_to_submit, OrderType.MICROBIAL_FASTQ)
-
- # THEN it should be regarded as valid
- ValidateMicrobialOrderService(base_store).validate_order(order=order)
diff --git a/tests/services/orders/test_validate_order_service/test_validate_pacbio_order_service.py b/tests/services/orders/test_validate_order_service/test_validate_pacbio_order_service.py
deleted file mode 100644
index 872ccc2a64..0000000000
--- a/tests/services/orders/test_validate_order_service/test_validate_pacbio_order_service.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import pytest
-
-from cg.exc import OrderError
-from cg.models.orders.order import OrderIn
-from cg.services.orders.validate_order_services.validate_pacbio_order import (
- ValidatePacbioOrderService,
-)
-from cg.store.store import Store
-
-
-def test_validate_valid_pacbio_order(
- validate_pacbio_order_service: ValidatePacbioOrderService, pacbio_order: OrderIn
-):
- # GIVEN a valid PacBio order
-
- # WHEN validating the order
- validate_pacbio_order_service.validate_order(pacbio_order)
-
- # THEN no error is raised
-
-
-def test_validate_pacbio_order_unknown_customer(
- pacbio_order: OrderIn, validate_pacbio_order_service: ValidatePacbioOrderService
-):
- # GIVEN a PacBio order with an unknown customer
- pacbio_order.customer = "unknown_customer"
-
- # WHEN validating the order
-
- # THEN an order error should be raised
- with pytest.raises(OrderError):
- validate_pacbio_order_service.validate_order(pacbio_order)
-
-
-def test_validate_pacbio_order_invalid_application(
- pacbio_order: OrderIn, validate_pacbio_order_service: ValidatePacbioOrderService
-):
- # GIVEN a PacBio order with an unknown application
- pacbio_order.samples[0].application = "unknown_application"
-
- # WHEN validating the order
-
- # THEN an order error should be raised
- with pytest.raises(OrderError):
- validate_pacbio_order_service.validate_order(pacbio_order)
-
-
-def test_validate_pacbio_order_reused_sample_name(
- pacbio_order: OrderIn, validate_pacbio_order_service: ValidatePacbioOrderService
-):
- # GIVEN a PacBio order with a reused sample name
- status_db: Store = validate_pacbio_order_service.status_db
- customer = status_db.get_customer_by_internal_id(pacbio_order.customer)
- old_sample_name: str = status_db.get_samples_by_customers_and_pattern(customers=[customer])[
- 0
- ].name
- pacbio_order.samples[0].name = old_sample_name
-
- # WHEN validating the order
-
- # THEN an order error should be raised
- with pytest.raises(OrderError):
- validate_pacbio_order_service.validate_order(pacbio_order)
diff --git a/tests/services/orders/test_validate_order_service/test_validate_pool_order_service.py b/tests/services/orders/test_validate_order_service/test_validate_pool_order_service.py
deleted file mode 100644
index 98e138f0f6..0000000000
--- a/tests/services/orders/test_validate_order_service/test_validate_pool_order_service.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import pytest
-
-from cg.constants import DataDelivery
-from cg.constants.constants import Workflow
-from cg.exc import OrderError
-from cg.models.orders.constants import OrderType
-from cg.models.orders.order import OrderIn
-from cg.models.orders.samples import RmlSample
-from cg.services.orders.validate_order_services.validate_pool_order import ValidatePoolOrderService
-from cg.store.models import Customer
-from cg.store.store import Store
-from tests.store_helpers import StoreHelpers
-
-
-def test_validate_normal_order(rml_order_to_submit: dict, base_store: Store):
- # GIVEN pool order with three samples, none in the database
- order = OrderIn.parse_obj(rml_order_to_submit, OrderType.RML)
-
- # WHEN validating the order
- ValidatePoolOrderService(status_db=base_store).validate_order(order=order)
- # THEN it should be regarded as valid
-
-
-def test_validate_case_name(rml_order_to_submit: dict, base_store: Store, helpers: StoreHelpers):
- # GIVEN pool order with a case already all in the database
- order: OrderIn = OrderIn.parse_obj(rml_order_to_submit, OrderType.RML)
-
- sample: RmlSample
- customer: Customer = helpers.ensure_customer(store=base_store, customer_id=order.customer)
- for sample in order.samples:
- case = helpers.ensure_case(
- store=base_store,
- case_name=ValidatePoolOrderService.create_case_name(
- ticket=order.ticket, pool_name=sample.pool
- ),
- customer=customer,
- data_analysis=Workflow.FLUFFY,
- data_delivery=DataDelivery.STATINA,
- )
- base_store.session.add(case)
- base_store.session.commit()
-
- # WHEN validating the order
- # THEN it should be regarded as invalid
- with pytest.raises(OrderError):
- ValidatePoolOrderService(status_db=base_store).validate_order(order=order)
diff --git a/tests/services/orders/validation_service/conftest.py b/tests/services/orders/validation_service/conftest.py
new file mode 100644
index 0000000000..abd05ead4e
--- /dev/null
+++ b/tests/services/orders/validation_service/conftest.py
@@ -0,0 +1,297 @@
+import pytest
+
+from cg.constants.constants import GenomeVersion
+from cg.constants.sequencing import SeqLibraryPrepCategory
+from cg.models.orders.constants import OrderType
+from cg.models.orders.sample_base import ContainerEnum, ControlEnum, SexEnum, StatusEnum
+from cg.services.orders.validation.constants import MINIMUM_VOLUME
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.order_type_maps import ORDER_TYPE_RULE_SET_MAP, RuleSet
+from cg.services.orders.validation.service import OrderValidationService
+from cg.services.orders.validation.workflows.tomte.constants import TomteDeliveryType
+from cg.services.orders.validation.workflows.tomte.models.case import TomteCase
+from cg.services.orders.validation.workflows.tomte.models.order import TomteOrder
+from cg.services.orders.validation.workflows.tomte.models.sample import TomteSample
+from cg.store.models import Application, Customer, User
+from cg.store.store import Store
+
+
+def create_tomte_sample(id: int) -> TomteSample:
+ return TomteSample(
+ name=f"name{id}",
+ application="RNAPOAR100",
+ container=ContainerEnum.plate,
+ container_name="ContainerName",
+ control=ControlEnum.not_control,
+ require_qc_ok=True,
+ reference_genome=GenomeVersion.HG19,
+ sex=SexEnum.female,
+ source="source",
+ status=StatusEnum.affected,
+ subject_id="subject1",
+ well_position=f"A:{id}",
+ volume=MINIMUM_VOLUME,
+ )
+
+
+def create_case(samples: list[TomteSample]) -> TomteCase:
+ return TomteCase(
+ name="name",
+ panels=[],
+ samples=samples,
+ )
+
+
+def create_tomte_order(cases: list[TomteCase]) -> TomteOrder:
+ order = TomteOrder(
+ delivery_type=TomteDeliveryType.FASTQ,
+ name="order_name",
+ project_type=OrderType.TOMTE,
+ customer="cust000",
+ cases=cases,
+ )
+ order._user_id = 1
+ order._generated_ticket_id = 123456
+ return order
+
+
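+# Usage sketch (illustrative only): the three builders above compose bottom-up,
+# e.g. a minimal single-sample order is
+#     order = create_tomte_order([create_case([create_tomte_sample(1)])])
+# The fixtures below follow this pattern, then mutate one field to make the
+# order invalid in exactly one way.
+
+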
+@pytest.fixture
+def case_with_samples_in_same_well() -> TomteCase:
+ sample_1: TomteSample = create_tomte_sample(1)
+ sample_2: TomteSample = create_tomte_sample(1)
+ return create_case([sample_1, sample_2])
+
+
+@pytest.fixture
+def sample_with_non_compatible_application() -> TomteSample:
+ sample: TomteSample = create_tomte_sample(1)
+ sample.application = "WGSPCFC030"
+ return sample
+
+
+@pytest.fixture
+def archived_application(base_store: Store) -> Application:
+ return base_store.add_application(
+ tag="archived_application",
+ prep_category="wts",
+ description="This is an archived_application",
+ percent_kth=100,
+ percent_reads_guaranteed=90,
+ is_archived=True,
+ )
+
+
+@pytest.fixture
+def application_tag_required_buffer() -> str:
+ return "WGSWPFR400"
+
+
+@pytest.fixture
+def valid_order() -> TomteOrder:
+ child: TomteSample = create_tomte_sample(1)
+ father: TomteSample = create_tomte_sample(2)
+ mother: TomteSample = create_tomte_sample(3)
+ grandfather: TomteSample = create_tomte_sample(4)
+ grandmother: TomteSample = create_tomte_sample(5)
+ case = create_case([child, father, mother, grandfather, grandmother])
+ return create_tomte_order([case])
+
+
+@pytest.fixture
+def order_with_samples_in_same_well(case_with_samples_in_same_well: TomteCase) -> TomteOrder:
+ return create_tomte_order([case_with_samples_in_same_well])
+
+
+@pytest.fixture
+def case_with_samples_with_repeated_names() -> TomteCase:
+ sample_1: TomteSample = create_tomte_sample(1)
+ sample_2: TomteSample = create_tomte_sample(1)
+ sample_1.name = sample_2.name
+ return create_case([sample_1, sample_2])
+
+
+@pytest.fixture
+def order_with_repeated_sample_names(
+ case_with_samples_with_repeated_names: TomteCase,
+) -> TomteOrder:
+ return create_tomte_order([case_with_samples_with_repeated_names])
+
+
+@pytest.fixture
+def case() -> TomteCase:
+ sample_1: TomteSample = create_tomte_sample(1)
+ sample_2: TomteSample = create_tomte_sample(2)
+ return create_case([sample_1, sample_2])
+
+
+@pytest.fixture
+def order_with_repeated_case_names(case: TomteCase) -> TomteOrder:
+ return create_tomte_order([case, case])
+
+
+@pytest.fixture
+def order_with_invalid_father_sex(case: TomteCase):
+ child: TomteSample = case.samples[0]
+ father: TomteSample = case.samples[1]
+ child.father = father.name
+ father.sex = SexEnum.female
+ return create_tomte_order([case])
+
+
+@pytest.fixture
+def order_with_father_in_wrong_case(case: TomteCase):
+ child: TomteSample = case.samples[0]
+ father: TomteSample = case.samples[1]
+ child.father = father.name
+ case.samples = [child]
+ return create_tomte_order([case])
+
+
+@pytest.fixture
+def order_with_sample_cycle():
+ child: TomteSample = create_tomte_sample(1)
+ father: TomteSample = create_tomte_sample(2)
+ mother: TomteSample = create_tomte_sample(3)
+ grandfather: TomteSample = create_tomte_sample(4)
+ grandmother: TomteSample = create_tomte_sample(5)
+
+ child.mother = mother.name
+ child.father = father.name
+
+ father.mother = grandmother.name
+ father.father = child.name # Cycle introduced here
+
+ case = create_case([child, father, mother, grandfather, grandmother])
+ return create_tomte_order([case])
+
+
+@pytest.fixture
+def order_with_existing_sample_cycle():
+ child: TomteSample = create_tomte_sample(1)
+ father = ExistingSample(internal_id="ExistingSampleInternalId", status=StatusEnum.unaffected)
+ mother: TomteSample = create_tomte_sample(3)
+ grandfather: TomteSample = create_tomte_sample(4)
+ grandmother: TomteSample = create_tomte_sample(5)
+
+ child.mother = mother.name
+ child.father = "ExistingSampleName"
+
+ father.mother = grandmother.name
+ father.father = child.name # Cycle introduced here
+
+ case = create_case([child, father, mother, grandfather, grandmother])
+ return create_tomte_order([case])
+
+
+@pytest.fixture
+def order_with_siblings_as_parents():
+ child: TomteSample = create_tomte_sample(1)
+
+ father: TomteSample = create_tomte_sample(3)
+ mother: TomteSample = create_tomte_sample(4)
+
+ grandfather: TomteSample = create_tomte_sample(5)
+ grandmother: TomteSample = create_tomte_sample(6)
+
+ child.father = father.name
+ child.mother = mother.name
+
+ father.mother = grandmother.name
+ father.father = grandfather.name
+
+ mother.mother = grandmother.name
+ mother.father = grandfather.name
+
+ case = create_case([child, father, mother, grandfather, grandmother])
+ return create_tomte_order([case])
+
+
+@pytest.fixture
+def sample_with_invalid_concentration():
+ sample: TomteSample = create_tomte_sample(1)
+ sample.concentration_ng_ul = 1
+ return sample
+
+
+@pytest.fixture
+def sample_with_missing_well_position():
+ sample: TomteSample = create_tomte_sample(1)
+ sample.well_position = None
+ return sample
+
+
+@pytest.fixture
+def application_with_concentration_interval(base_store: Store) -> Application:
+ application: Application = base_store.add_application(
+ tag="RNAPOAR100",
+ prep_category="wts",
+ description="This is an application with concentration interval",
+ percent_kth=100,
+ percent_reads_guaranteed=90,
+ sample_concentration_minimum=50,
+ sample_concentration_maximum=250,
+ )
+ application.order_types = [OrderType.TOMTE]
+ base_store.session.add(application)
+ base_store.commit_to_store()
+ return application
+
+
+@pytest.fixture
+def order_with_invalid_concentration(sample_with_invalid_concentration) -> TomteOrder:
+ case: TomteCase = create_case([sample_with_invalid_concentration])
+ order: TomteOrder = create_tomte_order([case])
+ order.skip_reception_control = True
+ return order
+
+
+@pytest.fixture
+def order_with_samples_having_same_names_as_cases() -> TomteOrder:
+ """Return an order with two cases, the first case having two samples named after the cases."""
+ sample_1: TomteSample = create_tomte_sample(1)
+ sample_2: TomteSample = create_tomte_sample(2)
+ sample_3: TomteSample = create_tomte_sample(3)
+ case_1: TomteCase = create_case([sample_1, sample_2])
+ case_1.name = sample_1.name
+ case_2: TomteCase = create_case([sample_3])
+ case_2.name = sample_2.name
+ return create_tomte_order([case_1, case_2])
+
+
+@pytest.fixture
+def sample_with_missing_container_name() -> TomteSample:
+ sample: TomteSample = create_tomte_sample(1)
+ sample.container_name = None
+ return sample
+
+
+@pytest.fixture
+def tomte_validation_service(
+ base_store: Store,
+ application_with_concentration_interval: Application,
+) -> OrderValidationService:
+ customer: Customer = base_store.get_customer_by_internal_id("cust000")
+ user: User = base_store.add_user(customer=customer, email="mail@email.com", name="new user")
+ base_store.session.add(user)
+ base_store.session.add(application_with_concentration_interval)
+ base_store.session.commit()
+ return OrderValidationService(base_store)
+
+
+@pytest.fixture
+def application_tgs(base_store: Store) -> Application:
+ application: Application = base_store.add_application(
+ tag="PANKTTR020",
+ prep_category=SeqLibraryPrepCategory.TARGETED_GENOME_SEQUENCING,
+ description="Panel-based sequencing, 20 M read pairs.",
+ percent_kth=59,
+ percent_reads_guaranteed=75,
+ )
+ base_store.session.add(application)
+ base_store.commit_to_store()
+ return application
+
+
+@pytest.fixture
+def tomte_rule_set() -> RuleSet:
+ return ORDER_TYPE_RULE_SET_MAP[OrderType.TOMTE]
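+
+
+# Illustrative note: the rules bundled in a RuleSet are plain functions that
+# return a list of error objects, as exercised directly in the rule tests of
+# this package, e.g. (signature as used there):
+#     errors = validate_application_exists(order=order, store=store)
+#     assert not errors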
diff --git a/tests/services/orders/validation_service/sample_rules/__init__.py b/tests/services/orders/validation_service/sample_rules/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/services/orders/validation_service/sample_rules/conftest.py b/tests/services/orders/validation_service/sample_rules/conftest.py
new file mode 100644
index 0000000000..60b91ebcc1
--- /dev/null
+++ b/tests/services/orders/validation_service/sample_rules/conftest.py
@@ -0,0 +1,74 @@
+import pytest
+
+from cg.models.orders.constants import OrderType
+from cg.models.orders.sample_base import ContainerEnum, PriorityEnum
+from cg.services.orders.validation.constants import MINIMUM_VOLUME, ElutionBuffer, ExtractionMethod
+from cg.services.orders.validation.workflows.microsalt.constants import MicrosaltDeliveryType
+from cg.services.orders.validation.workflows.microsalt.models.order import MicrosaltOrder
+from cg.services.orders.validation.workflows.microsalt.models.sample import MicrosaltSample
+from cg.store.models import Application
+from cg.store.store import Store
+
+
+def create_microsalt_sample(id: int) -> MicrosaltSample:
+ return MicrosaltSample(
+ name=f"name{id}",
+ application="MWRNXTR003",
+ container=ContainerEnum.plate,
+ container_name="ContainerName",
+ elution_buffer=ElutionBuffer.WATER,
+ extraction_method=ExtractionMethod.MAELSTROM,
+ organism="C. jejuni",
+ priority=PriorityEnum.standard,
+ require_qc_ok=True,
+ reference_genome="NC_00001",
+ well_position=f"A:{id}",
+ volume=MINIMUM_VOLUME,
+ )
+
+
+def create_microsalt_order(samples: list[MicrosaltSample]) -> MicrosaltOrder:
+ return MicrosaltOrder(
+ connect_to_ticket=True,
+ delivery_type=MicrosaltDeliveryType.FASTQ_QC,
+ name="order_name",
+ ticket_number="#12345",
+ project_type=OrderType.MICROSALT,
+ user_id=1,
+ customer="cust000",
+ samples=samples,
+ )
+
+
+@pytest.fixture
+def valid_microsalt_order() -> MicrosaltOrder:
+ sample_1: MicrosaltSample = create_microsalt_sample(1)
+ sample_2: MicrosaltSample = create_microsalt_sample(2)
+ sample_3: MicrosaltSample = create_microsalt_sample(3)
+ return create_microsalt_order([sample_1, sample_2, sample_3])
+
+
+@pytest.fixture
+def sample_with_non_compatible_application() -> MicrosaltSample:
+ sample: MicrosaltSample = create_microsalt_sample(1)
+ sample.application = "WGSPCFC030"
+ return sample
+
+
+@pytest.fixture
+def archived_application(base_store: Store) -> Application:
+ return base_store.add_application(
+ tag="archived_application",
+ prep_category="mic",
+ description="This is an archived_application",
+ percent_kth=100,
+ percent_reads_guaranteed=90,
+ is_archived=True,
+ )
+
+
+@pytest.fixture
+def order_with_samples_in_same_well() -> MicrosaltOrder:
+ sample_1: MicrosaltSample = create_microsalt_sample(1)
+ sample_2: MicrosaltSample = create_microsalt_sample(1)
+ return create_microsalt_order([sample_1, sample_2])
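+
+
+# Usage sketch (illustrative only): the fixtures above rely on the two
+# module-level builders, e.g. a single-sample order is simply
+#     order = create_microsalt_order([create_microsalt_sample(1)])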
diff --git a/tests/services/orders/validation_service/sample_rules/test_data_validators.py b/tests/services/orders/validation_service/sample_rules/test_data_validators.py
new file mode 100644
index 0000000000..8eb2c5140c
--- /dev/null
+++ b/tests/services/orders/validation_service/sample_rules/test_data_validators.py
@@ -0,0 +1,85 @@
+from cg.services.orders.validation.constants import MAXIMUM_VOLUME
+from cg.services.orders.validation.errors.sample_errors import (
+ ApplicationArchivedError,
+ ApplicationNotCompatibleError,
+ ApplicationNotValidError,
+ InvalidVolumeError,
+)
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_applications_not_archived,
+ validate_volume_interval,
+)
+from cg.services.orders.validation.workflows.microsalt.models.order import MicrosaltOrder
+from cg.services.orders.validation.workflows.microsalt.models.sample import MicrosaltSample
+from cg.store.models import Application
+from cg.store.store import Store
+
+
+def test_applications_exist_sample_order(valid_microsalt_order: MicrosaltOrder, base_store: Store):
+
+ # GIVEN an order with a sample with an application which is not found in the database
+ valid_microsalt_order.samples[0].application = "Non-existent app tag"
+
+ # WHEN validating that the specified applications exist
+ errors = validate_application_exists(order=valid_microsalt_order, store=base_store)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the invalid application
+ assert isinstance(errors[0], ApplicationNotValidError)
+
+
+def test_application_is_incompatible(
+ valid_microsalt_order: MicrosaltOrder,
+ sample_with_non_compatible_application: MicrosaltSample,
+ base_store: Store,
+):
+
+ # GIVEN an order that has a sample with an application which is incompatible with microsalt
+ valid_microsalt_order.samples.append(sample_with_non_compatible_application)
+
+ # WHEN validating the order
+ errors = validate_application_compatibility(order=valid_microsalt_order, store=base_store)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should be about the application compatibility
+ assert isinstance(errors[0], ApplicationNotCompatibleError)
+
+
+def test_application_is_not_archived(
+ valid_microsalt_order: MicrosaltOrder, archived_application: Application, base_store: Store
+):
+
+ # GIVEN an order with a new sample with an archived application
+ valid_microsalt_order.samples[0].application = archived_application.tag
+ base_store.session.add(archived_application)
+ base_store.commit_to_store()
+
+ # WHEN validating that the applications are not archived
+ errors = validate_applications_not_archived(order=valid_microsalt_order, store=base_store)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the archived application
+ assert isinstance(errors[0], ApplicationArchivedError)
+
+
+def test_invalid_volume(valid_microsalt_order: MicrosaltOrder, base_store: Store):
+
+ # GIVEN an order with a sample with an invalid volume
+ valid_microsalt_order.samples[0].volume = MAXIMUM_VOLUME + 10
+
+ # WHEN validating the volume interval
+ errors = validate_volume_interval(order=valid_microsalt_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the invalid volume
+ assert isinstance(errors[0], InvalidVolumeError)
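+
+
+# Note (inferred from the validation constants): valid volumes are expected to
+# fall within [MINIMUM_VOLUME, MAXIMUM_VOLUME], so MAXIMUM_VOLUME + 10 above
+# deliberately violates the upper bound.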
diff --git a/tests/services/orders/validation_service/sample_rules/test_inter_field_validators.py b/tests/services/orders/validation_service/sample_rules/test_inter_field_validators.py
new file mode 100644
index 0000000000..0ffb7fd4ff
--- /dev/null
+++ b/tests/services/orders/validation_service/sample_rules/test_inter_field_validators.py
@@ -0,0 +1,50 @@
+from cg.services.orders.validation.errors.sample_errors import (
+ OccupiedWellError,
+ SampleNameRepeatedError,
+)
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_sample_names_unique,
+ validate_wells_contain_at_most_one_sample,
+)
+from cg.services.orders.validation.workflows.microsalt.models.order import MicrosaltOrder
+
+
+def test_multiple_samples_in_well_not_allowed(order_with_samples_in_same_well: MicrosaltOrder):
+
+ # GIVEN an order with multiple samples in the same well
+
+ # WHEN validating the order
+ errors = validate_wells_contain_at_most_one_sample(order_with_samples_in_same_well)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should be about the well
+ assert isinstance(errors[0], OccupiedWellError)
+
+
+def test_order_without_multiple_samples_in_well(valid_microsalt_order: MicrosaltOrder):
+
+ # GIVEN a valid order with no samples in the same well
+
+ # WHEN validating the order
+ errors = validate_wells_contain_at_most_one_sample(valid_microsalt_order)
+
+ # THEN no errors should be returned
+ assert not errors
+
+
+def test_sample_name_repeated(valid_microsalt_order: MicrosaltOrder):
+
+ # GIVEN a valid order in which sample names are repeated
+ sample_name_1 = valid_microsalt_order.samples[0].name
+ valid_microsalt_order.samples[1].name = sample_name_1
+
+ # WHEN validating that the sample names are unique
+ errors = validate_sample_names_unique(valid_microsalt_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the repeated sample name
+ assert isinstance(errors[0], SampleNameRepeatedError)
diff --git a/tests/services/orders/validation_service/sample_rules/test_sample_rules.py b/tests/services/orders/validation_service/sample_rules/test_sample_rules.py
new file mode 100644
index 0000000000..2d3bdf17ab
--- /dev/null
+++ b/tests/services/orders/validation_service/sample_rules/test_sample_rules.py
@@ -0,0 +1,453 @@
+from cg.models.orders.sample_base import ContainerEnum, ControlEnum, PriorityEnum
+from cg.services.orders.validation.constants import ElutionBuffer, IndexEnum
+from cg.services.orders.validation.errors.sample_errors import (
+ BufferInvalidError,
+ ConcentrationInvalidIfSkipRCError,
+ ConcentrationRequiredError,
+ ContainerNameMissingError,
+ ContainerNameRepeatedError,
+ IndexNumberMissingError,
+ IndexNumberOutOfRangeError,
+ IndexSequenceMismatchError,
+ IndexSequenceMissingError,
+ PoolApplicationError,
+ PoolPriorityError,
+ SampleNameNotAvailableControlError,
+ SampleNameNotAvailableError,
+ VolumeRequiredError,
+ WellFormatError,
+ WellFormatRmlError,
+)
+from cg.services.orders.validation.index_sequences import INDEX_SEQUENCES
+from cg.services.orders.validation.rules.sample.rules import (
+ validate_buffer_skip_rc_condition,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_index_number_in_range,
+ validate_index_number_required,
+ validate_index_sequence_mismatch,
+ validate_index_sequence_required,
+ validate_non_control_sample_names_available,
+ validate_pools_contain_one_application,
+ validate_pools_contain_one_priority,
+ validate_sample_names_available,
+ validate_tube_container_name_unique,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_position_rml_format,
+)
+from cg.services.orders.validation.workflows.fastq.models.order import FastqOrder
+from cg.services.orders.validation.workflows.fluffy.models.order import FluffyOrder
+from cg.services.orders.validation.workflows.microsalt.models.order import MicrosaltOrder
+from cg.services.orders.validation.workflows.mutant.models.order import MutantOrder
+from cg.services.orders.validation.workflows.rml.models.order import RmlOrder
+from cg.services.orders.validation.workflows.rml.models.sample import RmlSample
+from cg.store.models import Sample
+from cg.store.store import Store
+from tests.store_helpers import StoreHelpers
+
+
+def test_sample_names_available(valid_microsalt_order: MicrosaltOrder, sample_store: Store):
+
+ # GIVEN an order with a sample name reused from a previous order
+ sample = sample_store.session.query(Sample).first()
+ valid_microsalt_order.customer = sample.customer.internal_id
+ valid_microsalt_order.samples[0].name = sample.name
+
+ # WHEN validating that the sample names are available to the customer
+ errors = validate_sample_names_available(order=valid_microsalt_order, store=sample_store)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the reused sample name
+ assert isinstance(errors[0], SampleNameNotAvailableError)
+
+
+def test_validate_tube_container_name_unique(valid_microsalt_order: MicrosaltOrder):
+
+ # GIVEN an order with three samples in tubes with 2 reused container names
+ valid_microsalt_order.samples[0].container = ContainerEnum.tube
+ valid_microsalt_order.samples[1].container = ContainerEnum.tube
+ valid_microsalt_order.samples[2].container = ContainerEnum.tube
+ valid_microsalt_order.samples[0].container_name = "container_name"
+ valid_microsalt_order.samples[1].container_name = "container_name"
+ valid_microsalt_order.samples[2].container_name = "ContainerName"
+
+ # WHEN validating the container names are unique
+ errors = validate_tube_container_name_unique(order=valid_microsalt_order)
+
+ # THEN the error should concern the reused container name
+ assert isinstance(errors[0], ContainerNameRepeatedError)
+ assert errors[0].sample_index == 0
+ assert errors[1].sample_index == 1
+
+
+def test_validate_sample_names_available(
+ fluffy_order: FluffyOrder, store: Store, helpers: StoreHelpers
+):
+ """
+ Test that an order without any control samples returns an error when one of its sample
+ names already exists in the database.
+ """
+
+ # GIVEN an order without control with a sample name already in the database
+ sample_name: str = fluffy_order.samples[0].name
+ helpers.add_sample(
+ store=store,
+ name=sample_name,
+ customer_id=fluffy_order.customer,
+ )
+
+ # WHEN validating that the sample names are available to the customer
+ errors = validate_sample_names_available(order=fluffy_order, store=store)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the reused sample name
+ assert isinstance(errors[0], SampleNameNotAvailableError)
+
+
+def test_validate_non_control_sample_names_available(
+ mutant_order: MutantOrder, store: Store, helpers: StoreHelpers
+):
+ """
+ Test that an order with a control sample name already existing in the database returns no error.
+ """
+
+ # GIVEN an order with a control sample
+ sample = mutant_order.samples[0]
+ assert sample.control == ControlEnum.positive
+
+ # GIVEN that there is a sample in the database with the same name
+ helpers.add_sample(
+ store=store,
+ name=sample.name,
+ customer_id=mutant_order.customer,
+ )
+
+ # WHEN validating that the sample names are available to the customer
+ errors = validate_non_control_sample_names_available(order=mutant_order, store=store)
+
+ # THEN no error should be returned because it is a control sample
+ assert not errors
+
+
+def test_validate_non_control_sample_names_available_non_control_sample_name(
+ mutant_order: MutantOrder, store: Store, helpers: StoreHelpers
+):
+ """
+ Test that an order with a non-control sample name already existing in the database returns an
+ error.
+ """
+
+ # GIVEN an order with a non-control sample
+ sample = mutant_order.samples[2]
+ assert sample.control == ControlEnum.not_control
+
+ # GIVEN that there is a sample in the database with the same name
+ helpers.add_sample(
+ store=store,
+ name=sample.name,
+ customer_id=mutant_order.customer,
+ )
+
+ # WHEN validating that the sample names are available to the customer
+ errors = validate_non_control_sample_names_available(order=mutant_order, store=store)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the reused sample name
+ assert isinstance(errors[0], SampleNameNotAvailableControlError)
+
+
+def test_validate_well_position_format(valid_microsalt_order: MicrosaltOrder):
+
+ # GIVEN an order with a sample with an invalid well position
+ valid_microsalt_order.samples[0].well_position = "J:4"
+
+ # WHEN validating the well position format
+ errors = validate_well_position_format(order=valid_microsalt_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the invalid well position
+ assert isinstance(errors[0], WellFormatError)
+ assert errors[0].sample_index == 0
+
+
+def test_validate_well_position_rml_format(rml_order: RmlOrder):
+
+ # GIVEN an RML order with a sample with an invalid well position
+ rml_order.samples[0].well_position_rml = "J:4"
+
+ # WHEN validating the well position format
+ errors = validate_well_position_rml_format(order=rml_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the invalid well position
+ assert isinstance(errors[0], WellFormatRmlError)
+ assert errors[0].sample_index == 0
+
+
+def test_validate_missing_container_name(valid_microsalt_order: MicrosaltOrder):
+
+ # GIVEN an order with a sample on a plate with no container name
+ valid_microsalt_order.samples[0].container = ContainerEnum.plate
+ valid_microsalt_order.samples[0].container_name = None
+
+ # WHEN validating the container name
+ errors = validate_container_name_required(order=valid_microsalt_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the missing container name
+ assert isinstance(errors[0], ContainerNameMissingError)
+ assert errors[0].sample_index == 0
+
+
+def test_validate_valid_container_name(valid_microsalt_order: MicrosaltOrder):
+
+ # GIVEN an order with a sample on a plate with a valid container name
+ valid_microsalt_order.samples[0].container = ContainerEnum.plate
+ valid_microsalt_order.samples[0].container_name = "Plate_123"
+
+ # WHEN validating the container name
+ errors = validate_container_name_required(order=valid_microsalt_order)
+
+ # THEN no error should be returned
+ assert not errors
+
+
+def test_validate_non_plate_container(valid_microsalt_order: MicrosaltOrder):
+
+ # GIVEN an order with missing container names but the samples are not on plates
+ valid_microsalt_order.samples[0].container = ContainerEnum.tube
+ valid_microsalt_order.samples[0].container_name = None
+
+ valid_microsalt_order.samples[1].container = ContainerEnum.no_container
+ valid_microsalt_order.samples[1].container_name = None
+
+ # WHEN validating the container name
+ errors = validate_container_name_required(order=valid_microsalt_order)
+
+ # THEN no error should be returned
+ assert not errors
+
+
+def test_missing_required_sample_volume(valid_microsalt_order: MicrosaltOrder):
+
+ # GIVEN an order with containerized samples missing volume
+ valid_microsalt_order.samples[0].container = ContainerEnum.tube
+ valid_microsalt_order.samples[0].volume = None
+
+ valid_microsalt_order.samples[1].container = ContainerEnum.plate
+ valid_microsalt_order.samples[1].volume = None
+
+ # WHEN validating the volume
+ errors = validate_volume_required(order=valid_microsalt_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the missing volume
+ assert isinstance(errors[0], VolumeRequiredError)
+ assert errors[0].sample_index == 0
+
+ assert isinstance(errors[1], VolumeRequiredError)
+ assert errors[1].sample_index == 1
+
+
+def test_non_required_sample_volume(valid_microsalt_order: MicrosaltOrder):
+
+ # GIVEN an order with a sample not in a container and no volume set
+ valid_microsalt_order.samples[0].container = ContainerEnum.no_container
+ valid_microsalt_order.samples[0].volume = None
+
+ # WHEN validating the volume
+ errors = validate_volume_required(order=valid_microsalt_order)
+
+ # THEN no error should be returned
+ assert not errors
+
+
+def test_validate_concentration_required_if_skip_rc(fastq_order: FastqOrder):
+
+ # GIVEN a Fastq order trying to skip reception control
+ fastq_order.skip_reception_control = True
+
+ # GIVEN that one of its samples has no concentration set
+ fastq_order.samples[0].concentration_ng_ul = None
+
+ # WHEN validating that the concentration is not missing
+ errors: list[ConcentrationRequiredError] = validate_concentration_required_if_skip_rc(
+ order=fastq_order
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the missing concentration
+ assert isinstance(errors[0], ConcentrationRequiredError)
+
+
+def test_validate_concentration_interval_if_skip_rc(fastq_order: FastqOrder, base_store: Store):
+
+ # GIVEN a Fastq order trying to skip reception control
+ fastq_order.skip_reception_control = True
+
+ # GIVEN that one of the samples has a concentration outside the allowed interval for its
+ # application
+ sample = fastq_order.samples[0]
+ application = base_store.get_application_by_tag(sample.application)
+ application.sample_concentration_minimum = sample.concentration_ng_ul + 1
+ base_store.session.add(application)
+ base_store.commit_to_store()
+
+ # WHEN validating that the order's samples' concentrations are within allowed intervals
+ errors: list[ConcentrationInvalidIfSkipRCError] = validate_concentration_interval_if_skip_rc(
+ order=fastq_order, store=base_store
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the concentration level
+ assert isinstance(errors[0], ConcentrationInvalidIfSkipRCError)
+
+
+def test_validate_buffer_skip_rc_condition(fastq_order: FastqOrder):
+
+ # GIVEN a Fastq order trying to skip reception control
+ fastq_order.skip_reception_control = True
+
+ # GIVEN that one of the samples has buffer specified as 'other'
+ fastq_order.samples[0].elution_buffer = ElutionBuffer.OTHER
+
+ # WHEN validating that the buffers follow the 'skip reception control' requirements
+ errors: list[BufferInvalidError] = validate_buffer_skip_rc_condition(order=fastq_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the buffer
+ assert isinstance(errors[0], BufferInvalidError)
+
+
+def test_validate_pools_contain_multiple_applications(rml_order: RmlOrder):
+
+ # GIVEN a pooled order with the same pool containing multiple applications
+ rml_order.samples[0].pool = "pool"
+ rml_order.samples[1].pool = "pool"
+ _, samples = next(iter(rml_order.pools.items()))
+ samples[1].application = f"Not {samples[0].application}"
+
+ # WHEN validating that the pools contain a single application
+ errors: list[PoolApplicationError] = validate_pools_contain_one_application(rml_order)
+
+ # THEN errors should be returned
+ assert errors
+
+ # THEN the errors should concern the pool with repeated applications
+ assert isinstance(errors[0], PoolApplicationError)
+ assert len(errors) == len(samples)
+
+
+def test_validate_pools_contain_multiple_priorities(rml_order: RmlOrder):
+
+ # GIVEN a pooled order with the same pool containing multiple priorities
+ rml_order.samples[0].pool = "pool"
+ rml_order.samples[1].pool = "pool"
+ _, samples = next(iter(rml_order.pools.items()))
+ samples[0].priority = PriorityEnum.research
+ samples[1].priority = PriorityEnum.priority
+
+ # WHEN validating that the pools contain a single priority
+ errors: list[PoolPriorityError] = validate_pools_contain_one_priority(rml_order)
+
+ # THEN errors should be returned
+ assert errors
+
+ # THEN the errors should concern the pool with repeated priorities
+ assert isinstance(errors[0], PoolPriorityError)
+ assert len(errors) == len(samples)
+
+
+def test_validate_missing_index_number(rml_order: RmlOrder):
+
+ # GIVEN an indexed order with a missing index number
+ erroneous_sample: RmlSample = rml_order.samples[0]
+ erroneous_sample.index = IndexEnum.AVIDA_INDEX_STRIP
+ erroneous_sample.index_number = None
+
+ # WHEN validating that no index numbers are missing
+ errors: list[IndexNumberMissingError] = validate_index_number_required(rml_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the sample's missing index number
+ assert isinstance(errors[0], IndexNumberMissingError)
+ assert errors[0].sample_index == 0
+
+
+def test_validate_index_number_out_of_range(rml_order: RmlOrder):
+
+ # GIVEN an indexed order with an index number out of range
+ erroneous_sample: RmlSample = rml_order.samples[0]
+ erroneous_sample.index = IndexEnum.AVIDA_INDEX_STRIP
+ erroneous_sample.index_number = len(INDEX_SEQUENCES[erroneous_sample.index]) + 1
+
+ # WHEN validating that the index numbers are in range
+ errors: list[IndexNumberOutOfRangeError] = validate_index_number_in_range(rml_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the sample's index number being out of range
+ assert isinstance(errors[0], IndexNumberOutOfRangeError)
+ assert errors[0].sample_index == 0
+
+
+def test_validate_missing_index_sequence(rml_order: RmlOrder):
+
+ # GIVEN an indexed order with a missing index sequence
+ erroneous_sample: RmlSample = rml_order.samples[0]
+ erroneous_sample.index = IndexEnum.AVIDA_INDEX_STRIP
+ erroneous_sample.index_sequence = None
+
+ # WHEN validating that no index sequences are missing
+ errors: list[IndexSequenceMissingError] = validate_index_sequence_required(rml_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the sample's missing index sequence
+ assert isinstance(errors[0], IndexSequenceMissingError)
+ assert errors[0].sample_index == 0
+
+
+def test_validate_index_sequence_mismatch(rml_order: RmlOrder):
+
+ # GIVEN an indexed order with a mismatched index sequence
+ erroneous_sample: RmlSample = rml_order.samples[0]
+ erroneous_sample.index = IndexEnum.AVIDA_INDEX_STRIP
+ erroneous_sample.index_number = 1
+ erroneous_sample.index_sequence = INDEX_SEQUENCES[erroneous_sample.index][10]
+
+ # WHEN validating that the index sequences match
+ errors: list[IndexSequenceMismatchError] = validate_index_sequence_mismatch(rml_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the sample's mismatched index sequence
+ assert isinstance(errors[0], IndexSequenceMismatchError)
+ assert errors[0].sample_index == 0
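+
+
+# Note (inferred from the assertions above): INDEX_SEQUENCES maps an IndexEnum
+# to the ordered list of its valid index sequences, and index_number appears to
+# be 1-based, so len(INDEX_SEQUENCES[index]) + 1 falls out of range and pairing
+# index_number=1 with the sequence at list position 10 yields a mismatch.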
diff --git a/tests/services/orders/validation_service/test_case_rules.py b/tests/services/orders/validation_service/test_case_rules.py
new file mode 100644
index 0000000000..b86a1d3d06
--- /dev/null
+++ b/tests/services/orders/validation_service/test_case_rules.py
@@ -0,0 +1,140 @@
+from cg.constants import GenePanelMasterList
+from cg.models.orders.sample_base import ContainerEnum, SexEnum
+from cg.services.orders.validation.errors.case_errors import (
+ CaseDoesNotExistError,
+ CaseNameNotAvailableError,
+ CaseOutsideOfCollaborationError,
+ MultipleSamplesInCaseError,
+ RepeatedCaseNameError,
+)
+from cg.services.orders.validation.models.existing_case import ExistingCase
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.rules.case.rules import (
+ validate_case_internal_ids_exist,
+ validate_case_names_available,
+ validate_case_names_not_repeated,
+ validate_existing_cases_belong_to_collaboration,
+ validate_one_sample_per_case,
+)
+from cg.services.orders.validation.workflows.mip_dna.models.order import MipDnaOrder
+from cg.services.orders.validation.workflows.rna_fusion.models.order import RnaFusionOrder
+from cg.services.orders.validation.workflows.rna_fusion.models.sample import RnaFusionSample
+from cg.store.models import Case
+from cg.store.store import Store
+
+
+def test_case_name_not_available(
+ valid_order: OrderWithCases, store_with_multiple_cases_and_samples: Store
+):
+ store = store_with_multiple_cases_and_samples
+
+ # GIVEN an order with a new case that has the same name as an existing case
+ case: Case = store.get_cases()[0]
+ valid_order.cases[0].name = case.name
+ valid_order.customer = case.customer.internal_id
+
+ # WHEN validating that the case name is available
+ errors: list[CaseNameNotAvailableError] = validate_case_names_available(
+ order=valid_order, store=store
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the case name
+ assert isinstance(errors[0], CaseNameNotAvailableError)
+
+
+def test_case_internal_ids_does_not_exist(
+ valid_order: OrderWithCases,
+ store_with_multiple_cases_and_samples: Store,
+):
+
+ # GIVEN an order with a case marked as existing but which does not exist in the database
+ existing_case = ExistingCase(internal_id="Non-existent case", panels=[GenePanelMasterList.AID])
+ valid_order.cases.append(existing_case)
+
+ # WHEN validating that the internal ids match existing cases
+ errors: list[CaseDoesNotExistError] = validate_case_internal_ids_exist(
+ order=valid_order,
+ store=store_with_multiple_cases_and_samples,
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the non-existent case
+ assert isinstance(errors[0], CaseDoesNotExistError)
+
+
+def test_repeated_case_names_not_allowed(order_with_repeated_case_names: OrderWithCases):
+ # GIVEN an order with cases with the same name
+
+ # WHEN validating the order
+ errors: list[RepeatedCaseNameError] = validate_case_names_not_repeated(
+ order_with_repeated_case_names
+ )
+
+ # THEN errors are returned
+ assert errors
+
+ # THEN the errors are about the case names
+ assert isinstance(errors[0], RepeatedCaseNameError)
+
+
+def test_multiple_samples_in_case(rnafusion_order: RnaFusionOrder):
+ # GIVEN an RNAFusion order with multiple samples in the same case
+ rnafusion_sample = RnaFusionSample(
+ container=ContainerEnum.tube,
+ container_name="container_name",
+ application="DummyAppTag",
+ name="ExtraSample",
+ require_qc_ok=False,
+ sex=SexEnum.female,
+ source="blood",
+ subject_id="subject",
+ )
+ rnafusion_order.cases[0].samples.append(rnafusion_sample)
+
+ # WHEN validating that the order has at most one sample per case
+ errors: list[MultipleSamplesInCaseError] = validate_one_sample_per_case(rnafusion_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the multiple samples in the first case
+ assert isinstance(errors[0], MultipleSamplesInCaseError)
+ assert errors[0].case_index == 0
+
+
+def test_case_outside_of_collaboration(
+ mip_dna_order: MipDnaOrder, store_with_multiple_cases_and_samples: Store
+):
+
+ # GIVEN a customer from outside the order's customer's collaboration
+ new_customer = store_with_multiple_cases_and_samples.add_customer(
+ internal_id="NewCustomer",
+ name="New customer",
+ invoice_address="Test street",
+ invoice_reference="Invoice reference",
+ )
+ store_with_multiple_cases_and_samples.add_item_to_store(new_customer)
+ store_with_multiple_cases_and_samples.commit_to_store()
+
+ # GIVEN that a case belonging to the new customer is added to the order
+ existing_cases: list[Case] = store_with_multiple_cases_and_samples.get_cases()
+ case = existing_cases[0]
+ case.customer = new_customer
+ existing_case = ExistingCase(internal_id=case.internal_id, panels=case.panels)
+ mip_dna_order.cases.append(existing_case)
+
+ # WHEN validating that the order does not contain cases from outside the customer's collaboration
+ errors: list[CaseOutsideOfCollaborationError] = validate_existing_cases_belong_to_collaboration(
+ order=mip_dna_order, store=store_with_multiple_cases_and_samples
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the added existing case
+ assert isinstance(errors[0], CaseOutsideOfCollaborationError)
diff --git a/tests/services/orders/validation_service/test_case_sample_rules.py b/tests/services/orders/validation_service/test_case_sample_rules.py
new file mode 100644
index 0000000000..692f5170f0
--- /dev/null
+++ b/tests/services/orders/validation_service/test_case_sample_rules.py
@@ -0,0 +1,602 @@
+import pytest
+
+from cg.models.orders.sample_base import ContainerEnum, SexEnum, StatusEnum
+from cg.services.orders.validation.errors.case_sample_errors import (
+ ApplicationArchivedError,
+ ApplicationNotCompatibleError,
+ ApplicationNotValidError,
+ BufferMissingError,
+ ConcentrationRequiredIfSkipRCError,
+ ContainerNameMissingError,
+ ContainerNameRepeatedError,
+ InvalidBufferError,
+ InvalidConcentrationIfSkipRCError,
+ InvalidVolumeError,
+ OccupiedWellError,
+ SampleDoesNotExistError,
+ SampleNameRepeatedError,
+ SampleNameSameAsCaseNameError,
+ SampleOutsideOfCollaborationError,
+ SexSubjectIdError,
+ StatusUnknownError,
+ SubjectIdSameAsCaseNameError,
+ SubjectIdSameAsSampleNameError,
+ VolumeRequiredError,
+ WellFormatError,
+ WellPositionMissingError,
+)
+from cg.services.orders.validation.models.existing_case import ExistingCase
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.models.order_with_cases import OrderWithCases
+from cg.services.orders.validation.rules.case_sample.rules import (
+ validate_application_compatibility,
+ validate_application_exists,
+ validate_application_not_archived,
+ validate_buffer_required,
+ validate_buffers_are_allowed,
+ validate_concentration_interval_if_skip_rc,
+ validate_concentration_required_if_skip_rc,
+ validate_container_name_required,
+ validate_existing_samples_belong_to_collaboration,
+ validate_not_all_samples_unknown_in_case,
+ validate_sample_names_different_from_case_names,
+ validate_sample_names_not_repeated,
+ validate_samples_exist,
+ validate_subject_ids_different_from_case_names,
+ validate_subject_ids_different_from_sample_names,
+ validate_subject_sex_consistency,
+ validate_tube_container_name_unique,
+ validate_volume_interval,
+ validate_volume_required,
+ validate_well_position_format,
+ validate_well_positions_required,
+ validate_wells_contain_at_most_one_sample,
+)
+from cg.services.orders.validation.workflows.mip_dna.models.order import MipDnaOrder
+from cg.services.orders.validation.workflows.tomte.models.order import TomteOrder
+from cg.services.orders.validation.workflows.tomte.models.sample import TomteSample
+from cg.store.models import Application, Sample
+from cg.store.store import Store
+
+
+def test_validate_well_position_format(valid_order: OrderWithCases):
+
+ # GIVEN an order with an invalid well position format
+ valid_order.cases[0].samples[0].well_position = "D:0"
+
+ # WHEN validating the well position format
+ errors: list[WellFormatError] = validate_well_position_format(order=valid_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the invalid well position format
+ assert isinstance(errors[0], WellFormatError)
+ assert errors[0].sample_index == 0 and errors[0].case_index == 0
+
+
+def test_validate_tube_container_name_unique(valid_order: OrderWithCases):
+
+ # GIVEN an order with two samples with the same tube container name
+ valid_order.cases[0].samples[0].container = ContainerEnum.tube
+ valid_order.cases[0].samples[1].container = ContainerEnum.tube
+ valid_order.cases[0].samples[0].container_name = "tube_name"
+ valid_order.cases[0].samples[1].container_name = "tube_name"
+
+ # WHEN validating the tube container name uniqueness
+ errors: list[ContainerNameRepeatedError] = validate_tube_container_name_unique(
+ order=valid_order
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the non-unique tube container name
+ assert isinstance(errors[0], ContainerNameRepeatedError)
+ assert errors[0].sample_index == 0 and errors[0].case_index == 0
+
+
+def test_applications_exist(valid_order: OrderWithCases, base_store: Store):
+ # GIVEN an order where one of the samples has an invalid application
+ for case in valid_order.cases:
+ case.samples[0].application = "Invalid application"
+
+ # WHEN validating the order
+ errors: list[ApplicationNotValidError] = validate_application_exists(
+ order=valid_order, store=base_store
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the invalid application
+ assert isinstance(errors[0], ApplicationNotValidError)
+
+
+def test_applications_not_archived(
+ valid_order: OrderWithCases, base_store: Store, archived_application: Application
+):
+ # GIVEN an order where one of the samples has an archived application
+ base_store.session.add(archived_application)
+ base_store.commit_to_store()
+ for case in valid_order.cases:
+ case.samples[0].application = archived_application.tag
+
+ # WHEN validating the order
+ errors: list[ApplicationArchivedError] = validate_application_not_archived(
+ order=valid_order, store=base_store
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the archived application
+ assert isinstance(errors[0], ApplicationArchivedError)
+
+
+def test_missing_required_volume(valid_order: OrderWithCases):
+
+ # GIVEN an order with two samples with missing volumes
+ valid_order.cases[0].samples[0].container = ContainerEnum.tube
+ valid_order.cases[0].samples[0].volume = None
+
+ valid_order.cases[0].samples[1].container = ContainerEnum.plate
+ valid_order.cases[0].samples[1].volume = None
+
+ # WHEN validating that required volumes are set
+ errors: list[VolumeRequiredError] = validate_volume_required(order=valid_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the errors should concern the missing volumes
+ assert isinstance(errors[0], VolumeRequiredError)
+ assert errors[0].sample_index == 0 and errors[0].case_index == 0
+
+ assert isinstance(errors[1], VolumeRequiredError)
+ assert errors[1].sample_index == 1 and errors[1].case_index == 0
+
+
+def test_sample_internal_ids_does_not_exist(
+ valid_order: OrderWithCases,
+ base_store: Store,
+ store_with_multiple_cases_and_samples: Store,
+):
+
+ # GIVEN an order with a sample marked as existing but which does not exist in the database
+ existing_sample = ExistingSample(internal_id="Non-existent sample", status=StatusEnum.unknown)
+ valid_order.cases[0].samples.append(existing_sample)
+
+ # WHEN validating that the samples exists
+ errors: list[SampleDoesNotExistError] = validate_samples_exist(
+ order=valid_order, store=store_with_multiple_cases_and_samples
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the non-existent sample
+ assert isinstance(errors[0], SampleDoesNotExistError)
+
+
+def test_application_is_incompatible(
+ valid_order: TomteOrder, sample_with_non_compatible_application: TomteSample, base_store: Store
+):
+
+ # GIVEN an order that has a sample with an application which is incompatible with the workflow
+ valid_order.cases[0].samples.append(sample_with_non_compatible_application)
+
+ # WHEN validating the order
+ errors: list[ApplicationNotCompatibleError] = validate_application_compatibility(
+ order=valid_order, store=base_store
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should be about the application compatibility
+ assert isinstance(errors[0], ApplicationNotCompatibleError)
+
+
+def test_subject_ids_same_as_case_names_not_allowed(valid_order: TomteOrder):
+
+ # GIVEN an order with a sample whose subject_id is the same as the case's name
+ case_name = valid_order.cases[0].name
+ valid_order.cases[0].samples[0].subject_id = case_name
+
+ # WHEN validating that no subject ids are the same as the case name
+ errors: list[SubjectIdSameAsCaseNameError] = validate_subject_ids_different_from_case_names(
+ valid_order
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the subject id being the same as the case name
+ assert isinstance(errors[0], SubjectIdSameAsCaseNameError)
+
+
+def test_well_position_missing(
+ valid_order: TomteOrder, sample_with_missing_well_position: TomteSample
+):
+ # GIVEN an order with a sample with a missing well position
+ valid_order.cases[0].samples.append(sample_with_missing_well_position)
+
+ # WHEN validating that no well positions are missing
+ errors: list[WellPositionMissingError] = validate_well_positions_required(valid_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the missing well position
+ assert isinstance(errors[0], WellPositionMissingError)
+
+
+def test_container_name_missing(
+ valid_order: TomteOrder, sample_with_missing_container_name: TomteSample
+):
+
+ # GIVEN an order with a sample missing its container name
+ valid_order.cases[0].samples.append(sample_with_missing_container_name)
+
+ # WHEN validating that it is not missing any container names
+ errors: list[ContainerNameMissingError] = validate_container_name_required(order=valid_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the missing container name
+ assert isinstance(errors[0], ContainerNameMissingError)
+
+
+@pytest.mark.parametrize("sample_volume", [1, 200], ids=["Too low", "Too high"])
+def test_volume_out_of_bounds(valid_order: TomteOrder, sample_volume: int):
+
+ # GIVEN an order containing a sample with an invalid volume
+ valid_order.cases[0].samples[0].volume = sample_volume
+
+ # WHEN validating that the volume is within bounds
+ errors: list[InvalidVolumeError] = validate_volume_interval(valid_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the invalid volume
+ assert isinstance(errors[0], InvalidVolumeError)
+
+
+def test_multiple_samples_in_well_not_allowed(order_with_samples_in_same_well: OrderWithCases):
+
+ # GIVEN an order with multiple samples in the same well
+
+ # WHEN validating the order
+ errors: list[OccupiedWellError] = validate_wells_contain_at_most_one_sample(
+ order_with_samples_in_same_well
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should be about the well
+ assert isinstance(errors[0], OccupiedWellError)
+
+
+def test_repeated_sample_names_not_allowed(
+ order_with_repeated_sample_names: OrderWithCases, base_store: Store
+):
+ # GIVEN an order with a case containing samples with the same name
+
+ # WHEN validating the order
+ errors: list[SampleNameRepeatedError] = validate_sample_names_not_repeated(
+ order=order_with_repeated_sample_names, store=base_store
+ )
+
+ # THEN errors are returned
+ assert errors
+
+ # THEN the errors are about the sample names
+ assert isinstance(errors[0], SampleNameRepeatedError)
+
+
+def test_elution_buffer_is_not_allowed(valid_order: TomteOrder):
+
+ # GIVEN an order with 'skip reception control' toggled but no buffers specified
+ valid_order.skip_reception_control = True
+
+ # WHEN validating that the buffers conform to the 'skip reception control' requirements
+ errors: list[InvalidBufferError] = validate_buffers_are_allowed(valid_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should be about the buffer compatibility
+ assert isinstance(errors[0], InvalidBufferError)
+
+
+def test_subject_id_same_as_sample_name_is_not_allowed(valid_order: TomteOrder):
+
+ # GIVEN an order with a sample with the same name and subject id
+ sample_name = valid_order.cases[0].samples[0].name
+ valid_order.cases[0].samples[0].subject_id = sample_name
+
+ # WHEN validating that the subject ids are different from the sample names
+ errors: list[SubjectIdSameAsSampleNameError] = validate_subject_ids_different_from_sample_names(
+ valid_order
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should be about the subject id being the same as the sample name
+ assert isinstance(errors[0], SubjectIdSameAsSampleNameError)
+
+
+def test_concentration_required_if_skip_rc(valid_order: OrderWithCases):
+ # GIVEN an order with missing concentration trying to skip reception control
+ valid_order.skip_reception_control = True
+
+ # WHEN validating that concentration is provided
+ errors: list[ConcentrationRequiredIfSkipRCError] = validate_concentration_required_if_skip_rc(
+ valid_order
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the missing concentration
+ assert isinstance(errors[0], ConcentrationRequiredIfSkipRCError)
+
+
+def test_concentration_not_within_interval_if_skip_rc(
+ order_with_invalid_concentration: TomteOrder,
+ sample_with_invalid_concentration: TomteSample,
+ base_store: Store,
+ application_with_concentration_interval: Application,
+):
+
+ # GIVEN an order skipping reception control
+ # GIVEN that the order has a sample with invalid concentration for its application
+ base_store.session.add(application_with_concentration_interval)
+ base_store.session.commit()
+
+ # WHEN validating that the concentration is within the allowed interval
+ errors: list[InvalidConcentrationIfSkipRCError] = validate_concentration_interval_if_skip_rc(
+ order=order_with_invalid_concentration, store=base_store
+ )
+
+ # THEN an error is returned
+ assert errors
+
+ # THEN the error should concern the application interval
+ assert isinstance(errors[0], InvalidConcentrationIfSkipRCError)
+
+
+def test_missing_volume_no_container(valid_order: OrderWithCases):
+
+ # GIVEN an order with a sample with missing volume, but which is in no container
+ valid_order.cases[0].samples[0].container = ContainerEnum.no_container
+ valid_order.cases[0].samples[0].volume = None
+
+ # WHEN validating that the order has required volumes set
+ errors: list[VolumeRequiredError] = validate_volume_required(order=valid_order)
+
+ # THEN no error should be returned
+ assert not errors
+
+
+def test_validate_sex_subject_id_clash(valid_order: OrderWithCases, sample_store: Store):
+ # GIVEN an existing sample
+ sample = sample_store.session.query(Sample).first()
+
+ # GIVEN an order and sample with the same customer and subject id
+ valid_order.customer = sample.customer.internal_id
+ valid_order.cases[0].samples[0].subject_id = "subject"
+ sample.subject_id = "subject"
+
+ # GIVEN a sample in the order that has a different sex
+ valid_order.cases[0].samples[0].sex = SexEnum.female
+ sample.sex = SexEnum.male
+
+ # WHEN validating the order
+ errors: list[SexSubjectIdError] = validate_subject_sex_consistency(
+ order=valid_order,
+ store=sample_store,
+ )
+
+ # THEN an error should be given for the clash
+ assert errors
+ assert isinstance(errors[0], SexSubjectIdError)
+
+
+def test_validate_sex_subject_id_no_clash(valid_order: OrderWithCases, sample_store: Store):
+ # GIVEN an existing sample
+ sample = sample_store.session.query(Sample).first()
+
+ # GIVEN an order and sample with the same customer and subject id
+ valid_order.customer = sample.customer.internal_id
+ valid_order.cases[0].samples[0].subject_id = "subject"
+ sample.subject_id = "subject"
+
+ # GIVEN that the order's sample has a matching sex to the one in StatusDB
+ valid_order.cases[0].samples[0].sex = SexEnum.female
+ sample.sex = SexEnum.female
+
+ # WHEN validating the order
+ errors: list[SexSubjectIdError] = validate_subject_sex_consistency(
+ order=valid_order,
+ store=sample_store,
+ )
+
+ # THEN no error should be returned
+ assert not errors
+
+
+def test_validate_sex_subject_id_existing_sex_unknown(
+ valid_order: OrderWithCases, sample_store: Store
+):
+ # GIVEN an existing sample
+ sample = sample_store.session.query(Sample).first()
+
+ # GIVEN an order and sample with the same customer and subject id
+ valid_order.customer = sample.customer.internal_id
+ valid_order.cases[0].samples[0].subject_id = "subject"
+ sample.subject_id = "subject"
+
+ # GIVEN a sample in the order that has a known sex and the existing sample's sex is unknown
+ valid_order.cases[0].samples[0].sex = SexEnum.female
+ sample.sex = SexEnum.unknown
+
+ # WHEN validating the order
+ errors: list[SexSubjectIdError] = validate_subject_sex_consistency(
+ order=valid_order,
+ store=sample_store,
+ )
+
+ # THEN no error should be returned
+ assert not errors
+
+
+def test_validate_sex_subject_id_new_sex_unknown(valid_order: OrderWithCases, sample_store: Store):
+ # GIVEN an existing sample
+ sample = sample_store.session.query(Sample).first()
+
+ # GIVEN an order and sample with the same customer and subject id
+ valid_order.customer = sample.customer.internal_id
+ valid_order.cases[0].samples[0].subject_id = "subject"
+ sample.subject_id = "subject"
+
+ # GIVEN a sample in the order that has an unknown sex and the existing sample's sex is known
+ valid_order.cases[0].samples[0].sex = SexEnum.unknown
+ sample.sex = SexEnum.female
+
+ # WHEN validating the order
+ errors: list[SexSubjectIdError] = validate_subject_sex_consistency(
+ order=valid_order,
+ store=sample_store,
+ )
+
+ # THEN no error should be returned
+ assert not errors
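+
+
+# Summary of the four subject-sex tests above: SexSubjectIdError is only
+# returned when both the new and the existing sample have a known sex and the
+# sexes differ; SexEnum.unknown on either side is treated as consistent.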
+
+
+def test_validate_sample_names_different_from_case_names(
+ order_with_samples_having_same_names_as_cases: OrderWithCases, base_store: Store
+):
+ # GIVEN an order with a case holding samples with the same name as cases in the order
+
+ # WHEN validating that the sample names are different from the case names
+ errors: list[SampleNameSameAsCaseNameError] = validate_sample_names_different_from_case_names(
+ order=order_with_samples_having_same_names_as_cases, store=base_store
+ )
+
+ # THEN a list with two errors should be returned
+ assert len(errors) == 2
+
+ # THEN the errors should concern the same case and sample name and hold the correct indices
+ for error in errors:
+ assert isinstance(error, SampleNameSameAsCaseNameError)
+ assert error.case_index == 0
+
+ assert errors[0].sample_index == 0
+ assert errors[1].sample_index == 1
+
+
+def test_validate_sample_names_different_from_existing_case_names(
+ valid_order: TomteOrder, store_with_multiple_cases_and_samples: Store
+):
+ # GIVEN an order with a case holding samples with the same name as an existing case in the order
+ case = store_with_multiple_cases_and_samples.get_cases()[0]
+ existing_case = ExistingCase(internal_id=case.internal_id, panels=case.panels)
+ valid_order.cases.append(existing_case)
+ valid_order.cases[0].samples[0].name = case.name
+
+ # WHEN validating that the sample names are different from the case names
+ errors: list[SampleNameSameAsCaseNameError] = validate_sample_names_different_from_case_names(
+ order=valid_order, store=store_with_multiple_cases_and_samples
+ )
+
+ # THEN a list with one error should be returned
+ assert len(errors) == 1
+
+ # THEN the error should concern the clashing case and sample name and hold the correct indices
+ error = errors[0]
+ assert isinstance(error, SampleNameSameAsCaseNameError)
+ assert error.case_index == 0
+ assert error.sample_index == 0
+
+
+def test_validate_not_all_samples_unknown_in_case(valid_order: OrderWithCases):
+
+ # GIVEN an order with a case with all samples unknown
+ for sample in valid_order.cases[0].samples:
+ sample.status = StatusEnum.unknown
+
+ # WHEN validating that not all samples are unknown in a case
+ errors: list[StatusUnknownError] = validate_not_all_samples_unknown_in_case(order=valid_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the case with all samples unknown
+ assert isinstance(errors[0], StatusUnknownError)
+
+
+def test_validate_buffer_required(mip_dna_order: MipDnaOrder, application_tag_required_buffer: str):
+
+ # GIVEN an order for which the buffer is only required for samples running certain applications
+
+ # GIVEN that one of its samples has an app tag which makes the elution buffer mandatory
+ sample = mip_dna_order.cases[0].samples[0]
+ sample.application = application_tag_required_buffer
+
+ # GIVEN that the sample has no buffer set
+ sample.elution_buffer = None
+
+ # WHEN validating that required buffers are set
+ errors: list[BufferMissingError] = validate_buffer_required(mip_dna_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the missing buffer
+ error = errors[0]
+ assert isinstance(error, BufferMissingError)
+ assert error.sample_index == 0 and error.case_index == 0
+
+
+def test_existing_sample_from_outside_of_collaboration(
+ mip_dna_order: MipDnaOrder,
+ store_with_multiple_cases_and_samples: Store,
+ sample_id_in_single_case: str,
+):
+
+ # GIVEN a customer from outside the order's customer's collaboration
+ new_customer = store_with_multiple_cases_and_samples.add_customer(
+ internal_id="NewCustomer",
+ name="New customer",
+ invoice_address="Test street",
+ invoice_reference="Invoice reference",
+ )
+ store_with_multiple_cases_and_samples.add_item_to_store(new_customer)
+ store_with_multiple_cases_and_samples.commit_to_store()
+
+ # GIVEN a sample belonging to the customer is added to the order
+ sample: Sample = store_with_multiple_cases_and_samples.get_sample_by_internal_id(
+ sample_id_in_single_case
+ )
+ sample.customer = new_customer
+ existing_sample = ExistingSample(internal_id=sample.internal_id)
+ mip_dna_order.cases[0].samples.append(existing_sample)
+
+ # WHEN validating that the order does not contain samples from outside the customer's collaboration
+ errors: list[SampleOutsideOfCollaborationError] = (
+ validate_existing_samples_belong_to_collaboration(
+ order=mip_dna_order, store=store_with_multiple_cases_and_samples
+ )
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the added existing sample
+ assert isinstance(errors[0], SampleOutsideOfCollaborationError)
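+
+
+# A minimal sketch, inferred from the four sex/subject-id tests above, of when
+# two sexes should be treated as clashing: only when both are known and differ.
+# This helper is illustrative and not part of the cg validation code.
+def _example_sexes_clash(new_sex: SexEnum, existing_sex: SexEnum) -> bool:
+    if SexEnum.unknown in (new_sex, existing_sex):
+        return False
+    return new_sex != existing_sex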
diff --git a/tests/services/orders/validation_service/test_model_validator.py b/tests/services/orders/validation_service/test_model_validator.py
new file mode 100644
index 0000000000..a68eeb760f
--- /dev/null
+++ b/tests/services/orders/validation_service/test_model_validator.py
@@ -0,0 +1,144 @@
+import pytest
+
+from cg.services.orders.validation.model_validator.model_validator import ModelValidator
+from cg.services.orders.validation.models.order import Order
+from cg.services.orders.validation.workflows.fluffy.models.order import FluffyOrder
+from cg.services.orders.validation.workflows.mutant.models.order import MutantOrder
+from cg.services.orders.validation.workflows.rml.models.order import RmlOrder
+from cg.services.orders.validation.workflows.tomte.models.order import TomteOrder
+
+
+@pytest.mark.parametrize(
+ "order_fixture, expected_index_sequence, order_model",
+ [
+ ("fluffy_order_to_submit", "C01 IDT_10nt_568 (TGTGAGCGAA-AACTCCGATC)", FluffyOrder),
+ ("rml_order_to_submit", "C01 IDT_10nt_568 (TGTGAGCGAA-AACTCCGATC)", RmlOrder),
+ ],
+ ids=["fluffy", "rml"],
+)
+def test_validate_pool_sample_default_index(
+ order_fixture: str,
+ expected_index_sequence: str,
+ order_model: type[Order],
+ model_validator: ModelValidator,
+ request: pytest.FixtureRequest,
+):
+ """Test the default index sequence is set for a pool sample without index sequence."""
+ # GIVEN a pool raw order with a sample without index sequence but correct index and index number
+ raw_order: dict = request.getfixturevalue(order_fixture)
+ assert not raw_order["samples"][0]["index_sequence"]
+
+ # WHEN validating the order
+ order, _ = model_validator.validate(order=raw_order, model=order_model)
+
+ # THEN the index sequence should be set to the default index sequence
+ assert order.samples[0].index_sequence == expected_index_sequence
+
+
+def test_validate_mutant_sample_gets_lab_and_region(
+ sarscov2_order_to_submit: dict, model_validator: ModelValidator
+):
+ """Test the lab address and region code are set for a mutant sample without these fields."""
+ # GIVEN a Mutant order with a sample without lab address and region code
+ assert not sarscov2_order_to_submit["samples"][0]["original_lab_address"]
+ assert not sarscov2_order_to_submit["samples"][0]["region_code"]
+
+ # WHEN validating the order
+ order, _ = model_validator.validate(order=sarscov2_order_to_submit, model=MutantOrder)
+
+ # THEN the lab address and region code should be set
+ assert order.samples[0].original_lab_address == "171 76 Stockholm"
+ assert order.samples[0].region_code == "01"
+
+
+def test_order_field_error(valid_order: TomteOrder, model_validator: ModelValidator):
+ # GIVEN a Tomte order with an order field error
+ valid_order.name = ""
+ raw_order: dict = valid_order.model_dump(by_alias=True)
+
+ # WHEN validating the order
+ _, errors = model_validator.validate(order=raw_order, model=TomteOrder)
+
+ # THEN there should be an order error
+ assert errors.order_errors
+
+ # THEN the error should concern the missing name
+ assert errors.order_errors[0].field == "name"
+
+
+def test_case_field_error(valid_order: TomteOrder, model_validator: ModelValidator):
+ # GIVEN a Tomte order with a case field error
+ valid_order.cases[0].priority = None
+ raw_order: dict = valid_order.model_dump()
+
+ # WHEN validating the order
+ _, errors = model_validator.validate(order=raw_order, model=TomteOrder)
+
+ # THEN there should be a case error
+ assert errors.case_errors
+
+ # THEN the error should concern the missing priority
+ assert errors.case_errors[0].field == "priority"
+
+
+def test_case_sample_field_error(valid_order: TomteOrder, model_validator: ModelValidator):
+
+ # GIVEN a Tomte order with a case sample error
+ valid_order.cases[0].samples[0].well_position = 1.8
+ raw_order: dict = valid_order.model_dump()
+
+ # WHEN validating the order
+ _, errors = model_validator.validate(order=raw_order, model=TomteOrder)
+
+ # THEN a case sample error should be returned
+ assert errors.case_sample_errors
+
+ # THEN the case sample error should concern the invalid data type
+ assert errors.case_sample_errors[0].field == "well_position"
+
+
+def test_order_case_and_case_sample_field_error(
+ valid_order: TomteOrder, model_validator: ModelValidator
+):
+ # GIVEN a Tomte order with an order, case and case sample error
+ valid_order.name = None
+ valid_order.cases[0].priority = None
+ valid_order.cases[0].samples[0].well_position = 1.8
+ raw_order: dict = valid_order.model_dump(by_alias=True)
+
+ # WHEN validating the order
+ _, errors = model_validator.validate(order=raw_order, model=TomteOrder)
+
+ # THEN all errors should be returned
+ assert errors.order_errors
+ assert errors.case_errors
+ assert errors.case_sample_errors
+
+ # THEN the errors should concern the relevant fields
+ assert errors.order_errors[0].field == "name"
+ assert errors.case_errors[0].field == "priority"
+ assert errors.case_sample_errors[0].field == "well_position"
+
+
+def test_null_conversion(valid_order: TomteOrder, model_validator: ModelValidator):
+ # GIVEN a Tomte order with a sample with empty concentration
+ valid_order.cases[0].samples[0].concentration_ng_ul = ""
+ raw_order: dict = valid_order.model_dump(by_alias=True)
+
+ # WHEN validating the order
+ order, _ = model_validator.validate(order=raw_order, model=TomteOrder)
+
+ # THEN the empty concentration should be converted to None
+ assert order.cases[0].samples[0].concentration_ng_ul is None
+
+
+def test_skip_rc_default_conversion(valid_order: TomteOrder, model_validator: ModelValidator):
+ # GIVEN a Tomte order with skip_reception_control set to None
+ valid_order.skip_reception_control = None
+ raw_order: dict = valid_order.model_dump(by_alias=True)
+
+ # WHEN validating the order
+ order, _ = model_validator.validate(order=raw_order, model=TomteOrder)
+
+ # THEN the skip_reception_control value should be converted to the default, False
+ assert order.skip_reception_control is False
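+
+
+# A hedged sketch of how a caller might consume the (order, errors) tuple that
+# ModelValidator.validate returns, based only on how the tests above use it.
+# The helper is illustrative and not part of the cg API.
+def _example_validate_or_raise(raw_order: dict, model_validator: ModelValidator) -> TomteOrder:
+    order, errors = model_validator.validate(order=raw_order, model=TomteOrder)
+    if errors.order_errors or errors.case_errors or errors.case_sample_errors:
+        raise ValueError("order failed model validation")
+    return order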
diff --git a/tests/services/orders/validation_service/test_order_rules.py b/tests/services/orders/validation_service/test_order_rules.py
new file mode 100644
index 0000000000..cffac34469
--- /dev/null
+++ b/tests/services/orders/validation_service/test_order_rules.py
@@ -0,0 +1,64 @@
+from cg.services.orders.validation.errors.order_errors import (
+ CustomerCannotSkipReceptionControlError,
+ CustomerDoesNotExistError,
+ UserNotAssociatedWithCustomerError,
+)
+from cg.services.orders.validation.rules.order.rules import (
+ validate_customer_can_skip_reception_control,
+ validate_customer_exists,
+ validate_user_belongs_to_customer,
+)
+from cg.services.orders.validation.workflows.tomte.models.order import TomteOrder
+from cg.store.models import Customer
+from cg.store.store import Store
+
+
+def test_validate_customer_can_skip_reception_control(base_store: Store, valid_order: TomteOrder):
+ # GIVEN an order from an untrusted customer attempting to skip reception control
+ customer: Customer = base_store.get_customer_by_internal_id(valid_order.customer)
+ customer.is_trusted = False
+ valid_order.skip_reception_control = True
+
+ # WHEN validating that the customer can skip reception control
+ errors: list[CustomerCannotSkipReceptionControlError] = (
+ validate_customer_can_skip_reception_control(order=valid_order, store=base_store)
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the customer not being allowed to skip reception control
+ assert isinstance(errors[0], CustomerCannotSkipReceptionControlError)
+
+
+def test_validate_customer_does_not_exist(base_store: Store, valid_order: TomteOrder):
+ # GIVEN an order from an unknown customer
+ valid_order.customer = "Unknown customer"
+
+ # WHEN validating that the customer exists
+ errors: list[CustomerDoesNotExistError] = validate_customer_exists(
+ order=valid_order, store=base_store
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the unknown customer
+ assert isinstance(errors[0], CustomerDoesNotExistError)
+
+
+def test_validate_user_belongs_to_customer(base_store: Store, valid_order: TomteOrder):
+ # GIVEN an order for a customer which the logged-in user does not have access to
+ customer: Customer = base_store.get_customer_by_internal_id(valid_order.customer)
+ customer.users = []
+
+ # WHEN validating that the user belongs to the customer account
+ errors: list[UserNotAssociatedWithCustomerError] = validate_user_belongs_to_customer(
+ order=valid_order, store=base_store
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the user not belonging to the customer
+ assert isinstance(errors[0], UserNotAssociatedWithCustomerError)
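+
+
+# A minimal sketch of the rule shape these tests exercise: each order-level rule
+# takes the order and a Store and returns a (possibly empty) error list, so the
+# rules can be composed. Illustrative only; not how cg wires its rule sets.
+def _example_run_order_rules(order: TomteOrder, store: Store) -> list:
+    errors: list = []
+    for rule in (
+        validate_customer_exists,
+        validate_customer_can_skip_reception_control,
+        validate_user_belongs_to_customer,
+    ):
+        errors.extend(rule(order=order, store=store))
+    return errors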
diff --git a/tests/services/orders/validation_service/test_validation_service.py b/tests/services/orders/validation_service/test_validation_service.py
new file mode 100644
index 0000000000..16add7d335
--- /dev/null
+++ b/tests/services/orders/validation_service/test_validation_service.py
@@ -0,0 +1,21 @@
+import pytest
+
+from cg.exc import OrderError
+from cg.models.orders.constants import OrderType
+from cg.services.orders.validation.service import OrderValidationService
+
+
+def test_parse_and_validate_pydantic_error(
+ order_validation_service: OrderValidationService, invalid_balsamic_order_to_submit: dict
+):
+ # GIVEN a raw order that will fail validation and a validation service
+
+ # WHEN parsing and validating the order
+
+ # THEN an OrderError should be raised
+ with pytest.raises(OrderError):
+ order_validation_service.parse_and_validate(
+ raw_order=invalid_balsamic_order_to_submit,
+ order_type=OrderType.BALSAMIC,
+ user_id=1,
+ )
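+
+
+# A hedged usage sketch: since parse_and_validate raises OrderError on invalid
+# input, a caller could guard submission like this. The helper is illustrative.
+def _example_parse_or_none(
+    service: OrderValidationService, raw_order: dict, user_id: int
+):
+    try:
+        return service.parse_and_validate(
+            raw_order=raw_order, order_type=OrderType.BALSAMIC, user_id=user_id
+        )
+    except OrderError:
+        # Validation failed; let the caller decide how to report it.
+        return None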
diff --git a/tests/services/orders/validation_service/workflows/__init__.py b/tests/services/orders/validation_service/workflows/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/services/orders/validation_service/workflows/balsamic/__init__.py b/tests/services/orders/validation_service/workflows/balsamic/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/services/orders/validation_service/workflows/balsamic/conftest.py b/tests/services/orders/validation_service/workflows/balsamic/conftest.py
new file mode 100644
index 0000000000..0db505a3d7
--- /dev/null
+++ b/tests/services/orders/validation_service/workflows/balsamic/conftest.py
@@ -0,0 +1,107 @@
+import pytest
+
+from cg.constants.constants import CAPTUREKIT_CANCER_OPTIONS, GenomeVersion
+from cg.models.orders.constants import OrderType
+from cg.models.orders.sample_base import ContainerEnum, ControlEnum, SexEnum, StatusEnum
+from cg.services.orders.validation.constants import MINIMUM_VOLUME, ElutionBuffer
+from cg.services.orders.validation.order_type_maps import ORDER_TYPE_RULE_SET_MAP, RuleSet
+from cg.services.orders.validation.service import OrderValidationService
+from cg.services.orders.validation.workflows.balsamic.constants import BalsamicDeliveryType
+from cg.services.orders.validation.workflows.balsamic.models.case import BalsamicCase
+from cg.services.orders.validation.workflows.balsamic.models.order import BalsamicOrder
+from cg.services.orders.validation.workflows.balsamic.models.sample import BalsamicSample
+from cg.store.models import Application, Customer, User
+from cg.store.store import Store
+
+
+def create_sample(sample_id: int) -> BalsamicSample:
+ return BalsamicSample(
+ name=f"name{sample_id}",
+ application="PANKTTR020",
+ capture_kit=CAPTUREKIT_CANCER_OPTIONS[0],
+ container=ContainerEnum.plate,
+ container_name="ContainerName",
+ control=ControlEnum.not_control,
+ elution_buffer=ElutionBuffer.WATER,
+ require_qc_ok=True,
+ reference_genome=GenomeVersion.HG19,
+ sex=SexEnum.female,
+ source="source",
+ status=StatusEnum.affected,
+ subject_id=f"subject{id}",
+ well_position=f"A:{id}",
+ volume=MINIMUM_VOLUME,
+ tumour=False,
+ )
+
+
+def create_case(samples: list[BalsamicSample]) -> BalsamicCase:
+ return BalsamicCase(
+ name="name",
+ samples=samples,
+ )
+
+
+def create_order(cases: list[BalsamicCase]) -> BalsamicOrder:
+ order = BalsamicOrder(
+ delivery_type=BalsamicDeliveryType.FASTQ_ANALYSIS,
+ name="order_name",
+ project_type=OrderType.BALSAMIC,
+ customer="cust000",
+ cases=cases,
+ )
+ order._user_id = 1
+ order._generated_ticket_id = 12345
+ return order
+
+
+@pytest.fixture
+def valid_order() -> BalsamicOrder:
+ sample = create_sample(1)
+ case = create_case([sample])
+ return create_order([case])
+
+
+@pytest.fixture
+def balsamic_application(base_store: Store) -> Application:
+ application: Application = base_store.add_application(
+ tag="PANKTTR020",
+ prep_category="tgs",
+ description="This is an application which is compatible with balsamic",
+ percent_kth=100,
+ percent_reads_guaranteed=90,
+ sample_concentration_minimum=50,
+ sample_concentration_maximum=250,
+ )
+ application.order_types = [OrderType.BALSAMIC]
+ base_store.session.add(application)
+ base_store.commit_to_store()
+ return application
+
+
+@pytest.fixture
+def balsamic_validation_service(
+ base_store: Store,
+ balsamic_application: Application,
+) -> OrderValidationService:
+ customer: Customer = base_store.get_customer_by_internal_id("cust000")
+ user: User = base_store.add_user(customer=customer, email="mail@email.com", name="new user")
+ base_store.session.add(user)
+ base_store.session.add(balsamic_application)
+ base_store.session.commit()
+ return OrderValidationService(base_store)
+
+
+@pytest.fixture
+def balsamic_rule_set() -> RuleSet:
+ return ORDER_TYPE_RULE_SET_MAP[OrderType.BALSAMIC]
+
+
+@pytest.fixture
+def another_balsamic_sample() -> BalsamicSample:
+ return create_sample(2)
+
+
+@pytest.fixture
+def a_third_balsamic_sample() -> BalsamicSample:
+ return create_sample(3)
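+
+
+# A small usage sketch of the factory helpers above: build a hypothetical
+# tumour/normal pair in a single case, as the Balsamic rule tests do by
+# appending extra samples to the valid order.
+def _example_tumour_normal_order() -> BalsamicOrder:
+    tumour = create_sample(1)
+    tumour.tumour = True
+    normal = create_sample(2)
+    return create_order([create_case([tumour, normal])])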
diff --git a/tests/services/orders/validation_service/workflows/balsamic/test_rules.py b/tests/services/orders/validation_service/workflows/balsamic/test_rules.py
new file mode 100644
index 0000000000..d98e8e18ff
--- /dev/null
+++ b/tests/services/orders/validation_service/workflows/balsamic/test_rules.py
@@ -0,0 +1,105 @@
+from cg.services.orders.validation.errors.case_errors import (
+ DoubleNormalError,
+ DoubleTumourError,
+ MoreThanTwoSamplesInCaseError,
+ NumberOfNormalSamplesError,
+)
+from cg.services.orders.validation.errors.case_sample_errors import CaptureKitMissingError
+from cg.services.orders.validation.rules.case.rules import (
+ validate_at_most_two_samples_per_case,
+ validate_number_of_normal_samples,
+)
+from cg.services.orders.validation.rules.case_sample.rules import (
+ validate_capture_kit_panel_requirement,
+)
+from cg.services.orders.validation.workflows.balsamic.models.order import BalsamicOrder
+from cg.services.orders.validation.workflows.balsamic.models.sample import BalsamicSample
+from cg.store.models import Application
+from cg.store.store import Store
+
+
+def test_validate_capture_kit_required(
+ valid_order: BalsamicOrder, base_store: Store, application_tgs: Application
+):
+
+ # GIVEN an order with a TGS sample but missing capture kit
+ valid_order.cases[0].samples[0].application = application_tgs.tag
+ valid_order.cases[0].samples[0].capture_kit = None
+
+ # WHEN validating that the order has required capture kits set
+ errors: list[CaptureKitMissingError] = validate_capture_kit_panel_requirement(
+ order=valid_order, store=base_store
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the missing capture kit
+ assert isinstance(errors[0], CaptureKitMissingError)
+
+
+def test_more_than_two_samples_in_case(
+ valid_order: BalsamicOrder,
+ another_balsamic_sample: BalsamicSample,
+ a_third_balsamic_sample: BalsamicSample,
+):
+ # GIVEN a Balsamic order with three samples in the same case
+
+ valid_order.cases[0].samples.append(another_balsamic_sample)
+ valid_order.cases[0].samples.append(a_third_balsamic_sample)
+
+ # WHEN validating that the order has at most two samples per case
+ errors: list[MoreThanTwoSamplesInCaseError] = validate_at_most_two_samples_per_case(valid_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the multiple samples in the first case
+ assert isinstance(errors[0], MoreThanTwoSamplesInCaseError)
+ assert errors[0].case_index == 0
+
+
+def test_double_tumour_samples_in_case(
+ valid_order: BalsamicOrder, another_balsamic_sample: BalsamicSample, base_store: Store
+):
+ # GIVEN a Balsamic order with two samples in a case
+ valid_order.cases[0].samples.append(another_balsamic_sample)
+
+ # GIVEN that both samples are tumours
+ valid_order.cases[0].samples[0].tumour = True
+ valid_order.cases[0].samples[1].tumour = True
+
+ # WHEN validating the number of normal samples in the case
+ errors: list[NumberOfNormalSamplesError] = validate_number_of_normal_samples(
+ order=valid_order, store=base_store
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the double tumours in the case
+ assert isinstance(errors[0], DoubleTumourError)
+ assert errors[0].case_index == 0
+
+
+def test_double_normal_samples_in_case(
+ valid_order: BalsamicOrder, another_balsamic_sample: BalsamicSample, base_store: Store
+):
+ # GIVEN a Balsamic order with two samples in a case
+ valid_order.cases[0].samples.append(another_balsamic_sample)
+
+ # GIVEN that both samples are normal samples
+ valid_order.cases[0].samples[0].tumour = False
+ valid_order.cases[0].samples[1].tumour = False
+
+ # WHEN validating the number of normal samples in the case
+ errors: list[NumberOfNormalSamplesError] = validate_number_of_normal_samples(
+ order=valid_order, store=base_store
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern the double normals in the case
+ assert isinstance(errors[0], DoubleNormalError)
+ assert errors[0].case_index == 0
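+
+
+# A hedged sketch of the pairing rule the two tests above pin down (this is an
+# illustration, not the implementation of validate_number_of_normal_samples):
+# a two-sample Balsamic case should hold exactly one tumour and one normal.
+def _example_pairing_problem(samples: list[BalsamicSample]) -> str | None:
+    if len(samples) != 2:
+        return None
+    tumour_count: int = sum(1 for sample in samples if sample.tumour)
+    if tumour_count == 2:
+        return "double tumour"
+    if tumour_count == 0:
+        return "double normal"
+    return None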
diff --git a/tests/services/orders/validation_service/workflows/balsamic/test_validation_integration.py b/tests/services/orders/validation_service/workflows/balsamic/test_validation_integration.py
new file mode 100644
index 0000000000..1273fec534
--- /dev/null
+++ b/tests/services/orders/validation_service/workflows/balsamic/test_validation_integration.py
@@ -0,0 +1,97 @@
+from cg.models.orders.constants import OrderType
+from cg.services.orders.validation.order_type_maps import RuleSet
+from cg.services.orders.validation.service import OrderValidationService
+from cg.services.orders.validation.workflows.balsamic.models.order import BalsamicOrder
+
+
+def test_valid_order(
+ valid_order: BalsamicOrder,
+ balsamic_validation_service: OrderValidationService,
+ balsamic_rule_set: RuleSet,
+):
+
+ # GIVEN a valid order
+
+ # WHEN validating the order
+ errors = balsamic_validation_service._get_errors(
+ raw_order=valid_order.model_dump(by_alias=True),
+ model=BalsamicOrder,
+ rule_set=balsamic_rule_set,
+ user_id=valid_order._user_id,
+ )
+
+ # THEN no errors should be returned
+ assert not errors.order_errors
+ assert not errors.case_errors
+ assert not errors.case_sample_errors
+
+
+def test_valid_order_conversion(
+ valid_order: BalsamicOrder,
+ balsamic_validation_service: OrderValidationService,
+):
+
+ # GIVEN a valid order
+ order: dict = valid_order.model_dump()
+
+ # WHEN validating the order
+ response = balsamic_validation_service.get_validation_response(
+ raw_order=order, order_type=OrderType.BALSAMIC, user_id=valid_order._user_id
+ )
+
+ # THEN a response should be given
+ assert response
+
+
+def test_order_error_conversion(
+ valid_order: BalsamicOrder,
+ balsamic_validation_service: OrderValidationService,
+):
+
+ # GIVEN an order with a missing field on order level
+ valid_order.name = ""
+ order: dict = valid_order.model_dump()
+
+ # WHEN validating the order
+ response: dict = balsamic_validation_service.get_validation_response(
+ raw_order=order, order_type=OrderType.BALSAMIC, user_id=valid_order._user_id
+ )
+
+ # THEN there should be an error for the missing name
+ assert response["name"]["errors"]
+
+
+def test_case_error_conversion(
+ valid_order: BalsamicOrder,
+ balsamic_validation_service: OrderValidationService,
+):
+
+ # GIVEN an order with a faulty case priority
+ valid_order.cases[0].priority = "Non-existent priority"
+ order = valid_order.model_dump()
+
+ # WHEN validating the order
+ response: dict = balsamic_validation_service.get_validation_response(
+ raw_order=order, order_type=OrderType.BALSAMIC, user_id=valid_order._user_id
+ )
+
+ # THEN there should be an error for the faulty priority
+ assert response["cases"][0]["priority"]["errors"]
+
+
+def test_sample_error_conversion(
+ valid_order: BalsamicOrder,
+ balsamic_validation_service: OrderValidationService,
+):
+
+ # GIVEN an order with a sample with an invalid field
+ valid_order.cases[0].samples[0].volume = 1
+ invalid_order: dict = valid_order.model_dump(by_alias=True)
+
+ # WHEN validating the order
+ response = balsamic_validation_service.get_validation_response(
+ raw_order=invalid_order, order_type=OrderType.BALSAMIC, user_id=valid_order._user_id
+ )
+
+ # THEN an error should be returned regarding the invalid volume
+ assert response["cases"][0]["samples"][0]["volume"]["errors"]
diff --git a/tests/services/orders/validation_service/workflows/tomte/__init__.py b/tests/services/orders/validation_service/workflows/tomte/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/services/orders/validation_service/workflows/tomte/test_case_sample_rules.py b/tests/services/orders/validation_service/workflows/tomte/test_case_sample_rules.py
new file mode 100644
index 0000000000..ed244d2330
--- /dev/null
+++ b/tests/services/orders/validation_service/workflows/tomte/test_case_sample_rules.py
@@ -0,0 +1,162 @@
+from cg.models.orders.sample_base import StatusEnum
+from cg.services.orders.validation.errors.case_errors import (
+ InvalidGenePanelsError,
+ RepeatedGenePanelsError,
+)
+from cg.services.orders.validation.errors.case_sample_errors import (
+ DescendantAsFatherError,
+ FatherNotInCaseError,
+ InvalidFatherSexError,
+ PedigreeError,
+ SampleIsOwnFatherError,
+)
+from cg.services.orders.validation.models.existing_sample import ExistingSample
+from cg.services.orders.validation.rules.case.rules import validate_gene_panels_unique
+from cg.services.orders.validation.rules.case_sample.rules import (
+ validate_fathers_are_male,
+ validate_fathers_in_same_case_as_children,
+ validate_gene_panels_exist,
+ validate_pedigree,
+)
+from cg.services.orders.validation.workflows.tomte.models.order import TomteOrder
+from cg.store.store import Store
+from tests.store_helpers import StoreHelpers
+
+
+def test_invalid_gene_panels(valid_order: TomteOrder, base_store: Store):
+ # GIVEN an order with an invalid gene panel specified
+ invalid_panel = "Non-existent panel"
+ valid_order.cases[0].panels = [invalid_panel]
+
+ # WHEN validating that the gene panels exist
+ errors: list[InvalidGenePanelsError] = validate_gene_panels_exist(
+ order=valid_order, store=base_store
+ )
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern invalid gene panels
+ assert isinstance(errors[0], InvalidGenePanelsError)
+
+
+def test_repeated_gene_panels(valid_order: TomteOrder, store_with_panels: Store):
+ # GIVEN an order with repeated gene panels specified
+ panel: str = store_with_panels.get_panels()[0].abbrev
+ valid_order.cases[0].panels = [panel, panel]
+
+ # WHEN validating that the gene panels are unique
+ errors: list[RepeatedGenePanelsError] = validate_gene_panels_unique(valid_order)
+
+ # THEN an error should be returned
+ assert errors
+
+ # THEN the error should concern repeated gene panels
+ assert isinstance(errors[0], RepeatedGenePanelsError)
+
+
+def test_father_must_be_male(order_with_invalid_father_sex: TomteOrder):
+ # GIVEN an order with an incorrectly specified father
+
+ # WHEN validating the order
+ errors: list[InvalidFatherSexError] = validate_fathers_are_male(order_with_invalid_father_sex)
+
+ # THEN errors are returned
+ assert errors
+
+ # THEN the errors are about the father sex
+ assert isinstance(errors[0], InvalidFatherSexError)
+
+
+def test_father_in_wrong_case(order_with_father_in_wrong_case: TomteOrder):
+
+ # GIVEN an order with the father sample in the wrong case
+
+ # WHEN validating the order
+ errors: list[FatherNotInCaseError] = validate_fathers_in_same_case_as_children(
+ order_with_father_in_wrong_case
+ )
+
+ # THEN an error is returned
+ assert errors
+
+ # THEN the error is about the father being in the wrong case
+ assert isinstance(errors[0], FatherNotInCaseError)
+
+
+def test_sample_cannot_be_its_own_father(valid_order: TomteOrder, base_store: Store):
+ # GIVEN an order with a sample which has itself as a parent
+ sample = valid_order.cases[0].samples[0]
+ sample.father = sample.name
+
+ # WHEN validating the order
+ errors: list[PedigreeError] = validate_pedigree(order=valid_order, store=base_store)
+
+ # THEN an error is returned
+ assert errors
+
+ # THEN the error is about the sample having itself as a parent
+ assert isinstance(errors[0], SampleIsOwnFatherError)
+
+
+def test_sample_cycle_not_allowed(order_with_sample_cycle: TomteOrder, base_store: Store):
+ # GIVEN an order where a sample is a descendant of itself
+
+ # WHEN validating the order
+ errors: list[PedigreeError] = validate_pedigree(order=order_with_sample_cycle, store=base_store)
+
+ # THEN an error is returned
+ assert errors
+
+ # THEN the error is about the sample being a descendant of itself
+ assert isinstance(errors[0], DescendantAsFatherError)
+
+
+def test_incest_is_allowed(order_with_siblings_as_parents: TomteOrder, base_store: Store):
+ # GIVEN an order where parents are siblings
+
+ # WHEN validating the order
+ errors: list[PedigreeError] = validate_pedigree(
+ order=order_with_siblings_as_parents, store=base_store
+ )
+
+ # THEN no error is returned
+ assert not errors
+
+
+def test_existing_samples_in_tree(
+ valid_order: TomteOrder, base_store: Store, helpers: StoreHelpers
+):
+ # GIVEN a valid order where an existing sample is added
+ sample = helpers.add_sample(store=base_store)
+ existing_sample = ExistingSample(internal_id=sample.internal_id, status=StatusEnum.affected)
+ valid_order.cases[0].samples.append(existing_sample)
+
+ # WHEN validating the order
+ errors: list[PedigreeError] = validate_pedigree(order=valid_order, store=base_store)
+
+ # THEN no error is returned
+ assert not errors
+
+
+def test_existing_sample_cycle_not_allowed(
+ order_with_existing_sample_cycle: TomteOrder, base_store: Store, helpers: StoreHelpers
+):
+
+ # GIVEN an order containing an existing sample and a cycle
+ existing_sample = order_with_existing_sample_cycle.cases[0].samples[1]
+ assert not existing_sample.is_new
+ helpers.add_sample(
+ store=base_store, name="ExistingSampleName", internal_id=existing_sample.internal_id
+ )
+
+ # WHEN validating the order
+ errors: list[PedigreeError] = validate_pedigree(
+ order=order_with_existing_sample_cycle, store=base_store
+ )
+
+ # THEN an error is returned
+ assert errors
+
+ # THEN the error is about the sample being a descendant of itself
+ assert isinstance(errors[0], DescendantAsFatherError)
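+
+
+# A hedged sketch of the cycle check the pedigree tests above exercise
+# (illustrative only; the real logic lives in validate_pedigree): follow the
+# father links from a sample and flag it if the walk loops back on itself.
+def _example_is_own_ancestor(sample_name: str, father_of: dict[str, str | None]) -> bool:
+    seen: set[str] = set()
+    current: str | None = father_of.get(sample_name)
+    while current:
+        if current == sample_name:
+            return True
+        if current in seen:
+            # Any cycle along the walk would loop forever, so treat it as a
+            # pedigree error as well.
+            return True
+        seen.add(current)
+        current = father_of.get(current)
+    return False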
diff --git a/tests/services/orders/validation_service/workflows/tomte/test_validation_integration.py b/tests/services/orders/validation_service/workflows/tomte/test_validation_integration.py
new file mode 100644
index 0000000000..b6c0e4e954
--- /dev/null
+++ b/tests/services/orders/validation_service/workflows/tomte/test_validation_integration.py
@@ -0,0 +1,94 @@
+from cg.models.orders.constants import OrderType
+from cg.services.orders.validation.order_type_maps import RuleSet
+from cg.services.orders.validation.service import OrderValidationService
+from cg.services.orders.validation.workflows.tomte.models.order import TomteOrder
+
+
+def test_valid_order(
+ valid_order: TomteOrder,
+ tomte_validation_service: OrderValidationService,
+ tomte_rule_set: RuleSet,
+):
+
+ # GIVEN a valid order
+
+ # WHEN validating the order
+ errors = tomte_validation_service._get_errors(
+ raw_order=valid_order.model_dump(by_alias=True),
+ model=TomteOrder,
+ rule_set=tomte_rule_set,
+ user_id=valid_order._user_id,
+ )
+
+ # THEN no errors should be returned
+ assert not errors.order_errors
+ assert not errors.case_errors
+ assert not errors.case_sample_errors
+
+
+def test_valid_order_conversion(
+ valid_order: TomteOrder,
+ tomte_validation_service: OrderValidationService,
+):
+
+ # GIVEN a valid order
+ order: dict = valid_order.model_dump(by_alias=True)
+
+ # WHEN validating the order
+ response = tomte_validation_service.get_validation_response(
+ raw_order=order, order_type=OrderType.TOMTE, user_id=valid_order._user_id
+ )
+
+ # THEN a response should be given
+ assert response
+
+
+def test_order_error_conversion(
+ valid_order: TomteOrder,
+ tomte_validation_service: OrderValidationService,
+):
+
+ # GIVEN an order with a missing field on order level
+ valid_order.name = ""
+ order: dict = valid_order.model_dump(by_alias=True)
+
+ # WHEN validating the order
+ response: dict = tomte_validation_service.get_validation_response(
+ raw_order=order, order_type=OrderType.TOMTE, user_id=valid_order._user_id
+ )
+
+ # THEN there should be an error for the missing name
+ assert response["name"]["errors"]
+
+
+def test_case_error_conversion(valid_order: TomteOrder, tomte_validation_service: OrderValidationService):
+
+ # GIVEN an order with a faulty case priority
+ valid_order.cases[0].priority = "Non-existent priority"
+ order = valid_order.model_dump(by_alias=True)
+
+ # WHEN validating the order
+ response: dict = tomte_validation_service.get_validation_response(
+ raw_order=order, order_type=OrderType.TOMTE, user_id=valid_order._user_id
+ )
+
+ # THEN there should be an error for the faulty priority
+ assert response["cases"][0]["priority"]["errors"]
+
+
+def test_sample_error_conversion(
+ valid_order: TomteOrder,
+ tomte_validation_service: OrderValidationService,
+):
+
+ # GIVEN an order with a sample with an invalid field
+ valid_order.cases[0].samples[0].volume = 1
+ invalid_order: dict = valid_order.model_dump(by_alias=True)
+
+ # WHEN validating the order
+ response = tomte_validation_service.get_validation_response(
+ raw_order=invalid_order, order_type=OrderType.TOMTE, user_id=valid_order._user_id
+ )
+
+ # THEN an error should be returned regarding the invalid volume
+ assert response["cases"][0]["samples"][0]["volume"]["errors"]
diff --git a/tests/services/pdc_service/test_pdc_service.py b/tests/services/pdc_service/test_pdc_service.py
index f0760ee7b1..d274d38bd5 100644
--- a/tests/services/pdc_service/test_pdc_service.py
+++ b/tests/services/pdc_service/test_pdc_service.py
@@ -1,12 +1,12 @@
"""Tests for the meta PdcAPI"""
from unittest import mock
+
import pytest
+
from cg.constants import EXIT_FAIL
from cg.constants.process import EXIT_WARNING
-from cg.exc import (
- PdcError,
-)
+from cg.exc import PdcError
from cg.models.cg_config import CGConfig
from tests.conftest import create_process_response
@@ -80,9 +80,12 @@ def test_run_dsmc_command_fail(cg_context: CGConfig):
pdc_service = cg_context.pdc_service
# GIVEN an exit code signifying failure
- with pytest.raises(PdcError), mock.patch(
- "cg.utils.commands.subprocess.run",
- return_value=create_process_response(return_code=EXIT_FAIL),
+ with (
+ pytest.raises(PdcError),
+ mock.patch(
+ "cg.utils.commands.subprocess.run",
+ return_value=create_process_response(return_code=EXIT_FAIL),
+ ),
):
# WHEN running a dsmc command
pdc_service.run_dsmc_command(["archive", "something"])
diff --git a/tests/services/run_devices/pacbio/store_service/test_store_service.py b/tests/services/run_devices/pacbio/store_service/test_store_service.py
index c94d9f6160..8ecc69463d 100644
--- a/tests/services/run_devices/pacbio/store_service/test_store_service.py
+++ b/tests/services/run_devices/pacbio/store_service/test_store_service.py
@@ -97,10 +97,13 @@ def test_store_post_processing_data_error_database(
# GIVEN a store that raises an error when creating a PacBio SMRT cell
# WHEN trying to store data for a Pacbio instrument run
- with mock.patch(
- "cg.services.run_devices.pacbio.data_transfer_service.data_transfer_service.PacBioDataTransferService.get_post_processing_dtos",
- return_value=pac_bio_dtos,
- ), mock.patch.object(Store, "create_pac_bio_smrt_cell", side_effect=ValueError):
+ with (
+ mock.patch(
+ "cg.services.run_devices.pacbio.data_transfer_service.data_transfer_service.PacBioDataTransferService.get_post_processing_dtos",
+ return_value=pac_bio_dtos,
+ ),
+ mock.patch.object(Store, "create_pac_bio_smrt_cell", side_effect=ValueError),
+ ):
# THEN a PostProcessingStoreDataError is raised
with pytest.raises(PostProcessingStoreDataError):
pac_bio_store_service.store_post_processing_data(pacbio_barcoded_run_data)
@@ -116,13 +119,16 @@ def test_store_post_processing_data_error_parser(
# GIVEN a data transfer service that raises an error when parsing data
# WHEN trying to store data for a PacBio instrument run
- with mock.patch(
- "cg.services.run_devices.pacbio.data_transfer_service.data_transfer_service.PacBioDataTransferService.get_post_processing_dtos",
- return_value=pac_bio_dtos,
- ), mock.patch.object(
- PacBioDataTransferService,
- "get_post_processing_dtos",
- side_effect=PostProcessingDataTransferError,
+ with (
+ mock.patch(
+ "cg.services.run_devices.pacbio.data_transfer_service.data_transfer_service.PacBioDataTransferService.get_post_processing_dtos",
+ return_value=pac_bio_dtos,
+ ),
+ mock.patch.object(
+ PacBioDataTransferService,
+ "get_post_processing_dtos",
+ side_effect=PostProcessingDataTransferError,
+ ),
):
# THEN a PostProcessingStoreDataError is raised
with pytest.raises(PostProcessingStoreDataError):
diff --git a/tests/store/conftest.py b/tests/store/conftest.py
index 0c6c679dc1..49abb76f82 100644
--- a/tests/store/conftest.py
+++ b/tests/store/conftest.py
@@ -10,19 +10,10 @@
from cg.constants import Workflow
from cg.constants.devices import DeviceType
from cg.constants.priority import PriorityTerms
-from cg.constants.subject import PhenotypeStatus, Sex
+from cg.constants.subject import PhenotypeStatus
from cg.services.illumina.data_transfer.models import IlluminaFlowCellDTO
-from cg.services.orders.store_order_services.store_pool_order import StorePoolOrderService
-from cg.store.models import (
- Analysis,
- Application,
- Case,
- CaseSample,
- Customer,
- IlluminaFlowCell,
- Organism,
- Sample,
-)
+from cg.services.orders.storing.implementations.pool_order_service import StorePoolOrderService
+from cg.store.models import Analysis, Application, Case, CaseSample, IlluminaFlowCell, Sample
from cg.store.store import Store
from tests.store_helpers import StoreHelpers
diff --git a/tests/store/crud/conftest.py b/tests/store/crud/conftest.py
index d71ab9c025..d884fc947d 100644
--- a/tests/store/crud/conftest.py
+++ b/tests/store/crud/conftest.py
@@ -121,8 +121,14 @@ def store_with_samples_that_have_names(store: Store, helpers: StoreHelpers) -> S
@pytest.fixture
def store_with_rna_and_dna_samples_and_cases(store: Store, helpers: StoreHelpers) -> Store:
- """Return a store with 1 rna sample 3 dna samples related to the rna sample and 1 more dna sample not related to the dna sample."""
- helpers.add_sample(
+ """Return a store with:
+ - 1 rna sample
+ - 3 dna samples related to the rna sample (with different prep categories)
+ - 1 more dna sample not related to the rna sample
+ - 2 dna cases including the related dna sample
+ - 1 dna case including the unrelated dna sample"""
+
+ rna_sample: Sample = helpers.add_sample(
store=store,
internal_id="rna_sample",
application_type=SeqLibraryPrepCategory.WHOLE_TRANSCRIPTOME_SEQUENCING.value,
@@ -139,6 +145,7 @@ def store_with_rna_and_dna_samples_and_cases(store: Store, helpers: StoreHelpers
is_tumour=True,
customer_id="cust001",
)
+
helpers.add_sample(
store=store,
internal_id="related_dna_sample_2",
@@ -157,6 +164,7 @@ def store_with_rna_and_dna_samples_and_cases(store: Store, helpers: StoreHelpers
is_tumour=True,
customer_id="cust000",
)
+
helpers.add_sample(
store=store,
internal_id="not_related_dna_sample",
@@ -167,6 +175,15 @@ def store_with_rna_and_dna_samples_and_cases(store: Store, helpers: StoreHelpers
customer_id="cust000",
)
+ rna_case: Case = helpers.add_case(
+ store=store,
+ internal_id="rna_case",
+ name="rna_case",
+ data_analysis=Workflow.MIP_RNA,
+ customer_id="cust000",
+ )
+ helpers.add_relationship(store=store, sample=rna_sample, case=rna_case)
+
related_dna_case_1: Case = helpers.add_case(
store=store,
internal_id="related_dna_case_1",
@@ -174,6 +191,7 @@ def store_with_rna_and_dna_samples_and_cases(store: Store, helpers: StoreHelpers
customer_id="cust001",
)
helpers.add_relationship(store=store, sample=related_dna_sample_1, case=related_dna_case_1)
+ helpers.add_analysis(store=store, case=related_dna_case_1, uploaded_at=datetime.now())
related_dna_case_2: Case = helpers.add_case(
store=store,
@@ -208,16 +226,18 @@ def rna_sample_collaborators(rna_sample: Sample) -> set[Customer]:
@pytest.fixture
-def related_dna_sample_1(store_with_rna_and_dna_samples_and_cases: Store) -> Sample:
- return store_with_rna_and_dna_samples_and_cases.get_sample_by_internal_id(
- internal_id="related_dna_sample_1"
- )
+def rna_case(store_with_rna_and_dna_samples_and_cases: Store) -> Case:
+ return store_with_rna_and_dna_samples_and_cases.get_case_by_internal_id("rna_case")
@pytest.fixture
-def related_dna_samples(
- store_with_rna_and_dna_samples_and_cases: Store, related_dna_sample_1: Sample
-) -> list[Sample]:
+def related_dna_samples(store_with_rna_and_dna_samples_and_cases: Store) -> list[Sample]:
+ related_dna_sample_1: Sample = (
+ store_with_rna_and_dna_samples_and_cases.get_sample_by_internal_id(
+ internal_id="related_dna_sample_1"
+ )
+ )
+
related_dna_sample_2: Sample = (
store_with_rna_and_dna_samples_and_cases.get_sample_by_internal_id(
internal_id="related_dna_sample_2"
@@ -247,6 +267,15 @@ def related_dna_cases(store_with_rna_and_dna_samples_and_cases: Store) -> list[C
return [related_dna_case_1, related_dna_case_2]
+@pytest.fixture
+def uploaded_related_dna_case(related_dna_cases: list[Case]) -> list[Case]:
+ return [case for case in related_dna_cases if case.is_uploaded]
+
+
@pytest.fixture
def store_with_active_sample_analyze(
store: Store, helpers: StoreHelpers
@@ -472,7 +501,6 @@ def order_balsamic(helpers: StoreHelpers, store: Store) -> Order:
customer_id=2,
ticket_id=3,
order_date=datetime.now(),
- workflow=Workflow.BALSAMIC,
)
order.cases.append(case)
return order
diff --git a/tests/store/crud/read/test_read_case.py b/tests/store/crud/read/test_read_case.py
index 7970357cdf..c306e92cb6 100644
--- a/tests/store/crud/read/test_read_case.py
+++ b/tests/store/crud/read/test_read_case.py
@@ -1,31 +1,22 @@
-from cg.constants import Workflow
-from cg.store.models import Case, Customer, Sample
+from cg.store.models import Case
from cg.store.store import Store
-def test_get_related_cases(
+def test_get_uploaded_related_dna_case(
store_with_rna_and_dna_samples_and_cases: Store,
- related_dna_sample_1: Sample,
- rna_sample_collaborators: set[Customer],
+ rna_case: Case,
+ uploaded_related_dna_case: list[Case],
related_dna_cases: list[Case],
):
- # GIVEN a database with a sample in several cases
- # GIVEN a list of workflows
+ # GIVEN a database with an RNA case and several related DNA cases
+ # GIVEN that some of the DNA cases are uploaded and others are not
+ store: Store = store_with_rna_and_dna_samples_and_cases
- workflows = [
- Workflow.MIP_DNA,
- Workflow.BALSAMIC,
- Workflow.BALSAMIC_UMI,
- ]
-
- # WHEN getting the cases from the given sample by the given workflows and within the given collaborators
- fetched_related_dna_cases: list[Case] = (
- store_with_rna_and_dna_samples_and_cases.get_related_cases(
- sample_internal_id=related_dna_sample_1.internal_id,
- workflows=workflows,
- collaborators=rna_sample_collaborators,
- )
+ # WHEN getting the related DNA cases that are uploaded
+ fetched_uploaded_related_dna_case: list[Case] = store.get_uploaded_related_dna_cases(
+ rna_case=rna_case,
)
# THEN the correct set of cases is returned
- assert set(related_dna_cases) == set(fetched_related_dna_cases)
+ assert set(fetched_uploaded_related_dna_case) == set(uploaded_related_dna_case)
+ assert set(fetched_uploaded_related_dna_case) != set(related_dna_cases)
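+
+
+# A hedged usage sketch of the query exercised above (names as in the test):
+# fetch the uploaded DNA cases related to an RNA case and collect their ids.
+def _example_uploaded_dna_case_ids(store: Store, rna_case: Case) -> list[str]:
+    uploaded: list[Case] = store.get_uploaded_related_dna_cases(rna_case=rna_case)
+    return [case.internal_id for case in uploaded]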
diff --git a/tests/store/crud/read/test_read_sample.py b/tests/store/crud/read/test_read_sample.py
index 9af8e375dc..e2ce710ba1 100644
--- a/tests/store/crud/read/test_read_sample.py
+++ b/tests/store/crud/read/test_read_sample.py
@@ -6,7 +6,7 @@
from _pytest.fixtures import FixtureRequest
from sqlalchemy.orm import Query
-from cg.constants.sequencing import SeqLibraryPrepCategory
+from cg.constants.sequencing import DNA_PREP_CATEGORIES, SeqLibraryPrepCategory
from cg.store.models import Customer, Invoice, Sample
from cg.store.store import Store
from tests.store_helpers import StoreHelpers
@@ -614,18 +614,17 @@ def test_get_related_samples(
# GIVEN a database with an RNA sample and several DNA samples with the same subject_id and tumour status as the given sample
# GIVEN that all customers are in a collaboration
# GIVEN a list of dna prep categories
- dna_prep_categories: list[SeqLibraryPrepCategory] = [
- SeqLibraryPrepCategory.WHOLE_GENOME_SEQUENCING,
- SeqLibraryPrepCategory.TARGETED_GENOME_SEQUENCING,
- SeqLibraryPrepCategory.WHOLE_EXOME_SEQUENCING,
- ]
+ store: Store = store_with_rna_and_dna_samples_and_cases
+ prep_categories: list[SeqLibraryPrepCategory] = DNA_PREP_CATEGORIES
# WHEN getting the related DNA samples to the given sample
- fetched_related_dna_samples = store_with_rna_and_dna_samples_and_cases.get_related_samples(
- sample_internal_id=rna_sample.internal_id,
- prep_categories=dna_prep_categories,
+ fetched_related_dna_samples_query = store._get_related_samples_query(
+ sample=rna_sample,
+ prep_categories=prep_categories,
collaborators=rna_sample_collaborators,
)
+ fetched_related_dna_samples: list[Sample] = fetched_related_dna_samples_query.all()
+
# THEN the correct set of samples is returned
assert set(related_dna_samples) == set(fetched_related_dna_samples)
diff --git a/tests/store/filters/test_status_user_filters.py b/tests/store/filters/test_status_user_filters.py
index 397a66cb72..a6eeb8c5a7 100644
--- a/tests/store/filters/test_status_user_filters.py
+++ b/tests/store/filters/test_status_user_filters.py
@@ -1,5 +1,5 @@
from cg.store.filters.status_user_filters import filter_user_by_email
-from cg.store.models import User
+from cg.store.models import Customer, User
from cg.store.store import Store
@@ -51,3 +51,36 @@ def test_filter_user_by_email_none_returns_none(store_with_users: Store):
# THEN no user should be returned
assert filtered_user is None
+
+
+def test_filter_user_by_customer(store_with_users: Store):
+
+ # GIVEN a store with a user belonging to a customer
+ user: User = store_with_users._get_query(table=User).first()
+ customer: Customer = user.customers[0]
+
+ # WHEN checking whether the user is associated with the customer
+ user_is_associated: bool = store_with_users.is_user_associated_with_customer(
+ user_id=user.id,
+ customer_internal_id=customer.internal_id,
+ )
+
+ # THEN the user should be associated with the customer
+ assert user_is_associated
+
+
+def test_filter_user_not_associated_with_customer(
+ store_with_users: Store, customer_without_users: Customer
+):
+
+ # GIVEN a store with a user not belonging to a specific customer
+ user: User = store_with_users._get_query(table=User).first()
+
+ # WHEN checking whether the user is associated with the customer
+ user_is_associated: bool = store_with_users.is_user_associated_with_customer(
+ user_id=user.id,
+ customer_internal_id=customer_without_users.internal_id,
+ )
+
+ # THEN the user should not be associated with the customer
+ assert not user_is_associated
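+
+
+# A hedged sketch of how a caller might gate access on the association check
+# exercised above (illustrative only; not part of cg):
+def _example_assert_user_access(store: Store, user_id: int, customer_internal_id: str) -> None:
+    if not store.is_user_associated_with_customer(
+        user_id=user_id, customer_internal_id=customer_internal_id
+    ):
+        raise PermissionError("user is not associated with this customer")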
diff --git a/tests/store_helpers.py b/tests/store_helpers.py
index 16fc12df91..19ac29ddff 100644
--- a/tests/store_helpers.py
+++ b/tests/store_helpers.py
@@ -515,7 +515,6 @@ def add_order(
customer_id: int,
ticket_id: int,
order_date: datetime = datetime(year=2023, month=12, day=24),
- workflow: Workflow = Workflow.MIP_DNA,
) -> Order:
order = Order(
customer_id=customer_id,