Skip to content

Commit

Permalink
Merge branch 'master' into add_order_to_new_cases
Browse files Browse the repository at this point in the history
  • Loading branch information
islean authored Feb 20, 2024
2 parents 21f01e4 + df76a7b commit 626daa9
Show file tree
Hide file tree
Showing 20 changed files with 88 additions and 35 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 58.1.9
current_version = 59.2.1
commit = True
tag = True
tag_name = v{new_version}
Expand Down
32 changes: 32 additions & 0 deletions alembic/versions/2024_02_19_43eb680d6181_add_order_case_table.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
"""Add order case table
Revision ID: 43eb680d6181
Revises: d241d8c493fb
Create Date: 2024-02-19 10:13:21.075891
"""

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = "43eb680d6181"
down_revision = "d241d8c493fb"
branch_labels = None
depends_on = None


def upgrade():
    """Create the order_case link table joining orders to cases.

    Each row ties one order to one case; both sides cascade on delete so
    link rows disappear with their parent order or case, and the unique
    constraint forbids duplicate order/case pairs.
    """
    # Build the FK constraints up front so the create_table call reads flat.
    order_fk = sa.ForeignKeyConstraint(["order_id"], ["order.id"], ondelete="CASCADE")
    case_fk = sa.ForeignKeyConstraint(["case_id"], ["case.id"], ondelete="CASCADE")
    op.create_table(
        "order_case",
        sa.Column("order_id", sa.Integer, nullable=False, index=True),
        sa.Column("case_id", sa.Integer, nullable=False, index=True),
        order_fk,
        case_fk,
        sa.UniqueConstraint("order_id", "case_id", name="_order_case_uc"),
    )


def downgrade():
    """Drop the order_case link table, reversing upgrade()."""
    op.drop_table(table_name="order_case")
2 changes: 1 addition & 1 deletion cg/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
__title__ = "cg"
__version__ = "58.1.9"
__version__ = "59.2.1"
2 changes: 1 addition & 1 deletion cg/apps/demultiplex/demultiplex_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ def add_to_trailblazer(
out_dir=flow_cell.trailblazer_config_path.parent.as_posix(),
slurm_quality_of_service=self.slurm_quality_of_service,
email=self.mail,
data_analysis=Workflow.DEMULTIPLEX,
workflow=Workflow.DEMULTIPLEX,
)

def start_demultiplexing(self, flow_cell: FlowCellDirectoryData):
Expand Down
11 changes: 6 additions & 5 deletions cg/apps/tb/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from cg.constants.constants import APIMethods, FileFormat, JobType, WorkflowManager
from cg.constants.priority import SlurmQos
from cg.constants.tb import AnalysisStatus
from cg.exc import TrailblazerAPIHTTPError, TrailblazerAnalysisNotFound
from cg.exc import TrailblazerAnalysisNotFound, TrailblazerAPIHTTPError
from cg.io.controller import APIRequest, ReadStream

LOG = logging.getLogger(__name__)
Expand Down Expand Up @@ -109,7 +109,7 @@ def add_pending_analysis(
out_dir: str,
slurm_quality_of_service: SlurmQos,
email: str = None,
data_analysis: Workflow = None,
workflow: Workflow = None,
ticket: str = None,
workflow_manager: str = WorkflowManager.Slurm,
) -> TrailblazerAnalysis:
Expand All @@ -120,13 +120,14 @@ def add_pending_analysis(
"config_path": config_path,
"out_dir": out_dir,
"priority": slurm_quality_of_service,
"data_analysis": str(data_analysis).upper(),
"workflow": workflow.upper(),
"ticket": ticket,
"workflow_manager": workflow_manager,
}
LOG.debug(f"Submitting job to Trailblazer: {request_body}")
response = self.query_trailblazer(command="add-pending-analysis", request_body=request_body)
if response:
if response := self.query_trailblazer(
command="add-pending-analysis", request_body=request_body
):
return TrailblazerAnalysis.model_validate(response)

def set_analysis_uploaded(self, case_id: str, uploaded_at: datetime) -> None:
Expand Down
2 changes: 1 addition & 1 deletion cg/apps/tb/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ class TrailblazerAnalysis(BaseModel):
type: str | None = None
user_id: int | None = None
progress: float | None = 0.0
data_analysis: str | None = None
workflow: str | None = None
ticket: str | None = None
uploaded_at: str | None = None

Expand Down
2 changes: 1 addition & 1 deletion cg/cli/backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ def archive_spring_files(config: CGConfig, context: click.Context, dry_run: bool
LOG.info("Getting all spring files from Housekeeper.")
spring_files: Iterable[hk_models.File] = housekeeper_api.files(
tags=[SequencingFileTag.SPRING]
).filter(hk_models.File.path.like(f"%{config.environment}/{config.demultiplex.out_dir}%"))
).filter(hk_models.File.path.contains(f"{config.environment}/{config.demultiplex.out_dir}"))
for spring_file in spring_files:
LOG.info(f"Attempting encryption and PDC archiving for file {spring_file.path}")
if Path(spring_file.path).exists():
Expand Down
2 changes: 1 addition & 1 deletion cg/cli/upload/clinical_delivery.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def upload_clinical_delivery(context: click.Context, case_id: str, dry_run: bool
config_path=rsync_api.trailblazer_config_path.as_posix(),
out_dir=rsync_api.log_dir.as_posix(),
slurm_quality_of_service=Priority.priority_to_slurm_qos().get(case.priority),
data_analysis=Workflow.RSYNC,
workflow=Workflow.RSYNC,
ticket=case.latest_ticket,
)
trailblazer_api.add_upload_job_to_analysis(analysis_id=analysis.id, slurm_id=job_id)
Expand Down
2 changes: 1 addition & 1 deletion cg/cli/workflow/fastq/fastq_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,10 +38,10 @@ def _add_analysis_to_trailblazer(self, case: Case) -> None:
self.trailblazer_api.add_pending_analysis(
case_id=case.internal_id,
analysis_type=AnalysisType.OTHER,
data_analysis=Workflow.FASTQ,
config_path="",
out_dir="",
slurm_quality_of_service=case.slurm_priority,
workflow=Workflow.FASTQ,
ticket=case.latest_ticket,
)
self.trailblazer_api.set_analysis_status(
Expand Down
9 changes: 5 additions & 4 deletions cg/constants/delivery.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,13 +138,14 @@
{"fastq"},
]

SARSCOV2_ANALYSIS_CASE_TAGS: list[set[str]] = [
MUTANT_ANALYSIS_CASE_TAGS: list[set[str]] = [
{"pangolin"},
{"ks-delivery"},
]

SARSCOV2_ANALYSIS_SAMPLE_TAGS: list[set[str]] = [
MUTANT_ANALYSIS_SAMPLE_TAGS: list[set[str]] = [
{"fastq"},
{"vcf", "vcf-report", "fohm-delivery"},
]

RNAFUSION_ANALYSIS_CASE_TAGS: list[set[str]] = [
Expand Down Expand Up @@ -198,8 +199,8 @@
"sample_tags": FASTQ_ANALYSIS_SAMPLE_TAGS,
},
Workflow.MUTANT: {
"case_tags": SARSCOV2_ANALYSIS_CASE_TAGS,
"sample_tags": SARSCOV2_ANALYSIS_SAMPLE_TAGS,
"case_tags": MUTANT_ANALYSIS_CASE_TAGS,
"sample_tags": MUTANT_ANALYSIS_SAMPLE_TAGS,
},
Workflow.RNAFUSION: {
"case_tags": RNAFUSION_ANALYSIS_CASE_TAGS,
Expand Down
2 changes: 1 addition & 1 deletion cg/meta/rsync/rsync_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def add_to_trailblazer_api(
out_dir=self.log_dir.as_posix(),
slurm_quality_of_service=self.slurm_quality_of_service,
email=self.mail_user,
data_analysis=Workflow.RSYNC,
workflow=Workflow.RSYNC,
ticket=ticket,
)

Expand Down
6 changes: 3 additions & 3 deletions cg/meta/workflow/analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,14 +209,14 @@ def add_pending_trailblazer_analysis(self, case_id: str) -> None:
self.check_analysis_ongoing(case_id)
self.trailblazer_api.add_pending_analysis(
case_id=case_id,
email=environ_email(),
analysis_type=self.get_application_type(
self.status_db.get_case_by_internal_id(case_id).links[0].sample
),
out_dir=self.get_job_ids_path(case_id).parent.as_posix(),
config_path=self.get_job_ids_path(case_id).as_posix(),
out_dir=self.get_job_ids_path(case_id).parent.as_posix(),
slurm_quality_of_service=self.get_slurm_qos_for_case(case_id),
data_analysis=str(self.workflow),
email=environ_email(),
workflow=self.workflow,
ticket=self.status_db.get_latest_ticket_from_case(case_id),
workflow_manager=self.get_workflow_manager(),
)
Expand Down
10 changes: 9 additions & 1 deletion cg/meta/workflow/microsalt/microsalt.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,13 +89,21 @@ def get_config_path(self, filename: str) -> Path:

def get_job_ids_path(self, case_id: str) -> Path:
project_id: str = self.get_project_id(case_id)
return Path(
job_ids_path = Path(
self.root_dir,
"results",
"reports",
"trailblazer",
f"{project_id}_slurm_ids{FileExtensions.YAML}",
)
# Necessary due to how microsalt structures its output
self._ensure_old_job_ids_are_removed(job_ids_path)
return job_ids_path

def _ensure_old_job_ids_are_removed(self, job_ids_path: Path) -> None:
    """Delete a stale slurm-ids YAML file if one is already on disk.

    Guarded on the YAML suffix so only the expected job-ids artifact is
    ever unlinked.
    """
    if job_ids_path.suffix == FileExtensions.YAML and job_ids_path.exists():
        job_ids_path.unlink()

def get_deliverables_file_path(self, case_id: str) -> Path:
"""Returns a path where the microSALT deliverables file for the order_id should be
Expand Down
10 changes: 5 additions & 5 deletions cg/store/filters/status_case_filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ def filter_cases_by_case_search(cases: Query, case_search: str, **kwargs) -> Que
return (
cases.filter(
or_(
Case.internal_id.like(f"%{case_search}%"),
Case.name.like(f"%{case_search}%"),
Case.internal_id.contains(case_search),
Case.name.contains(case_search),
)
)
if case_search
Expand Down Expand Up @@ -58,7 +58,7 @@ def filter_case_by_internal_id(cases: Query, internal_id: str, **kwargs) -> Quer

def filter_cases_by_internal_id_search(cases: Query, internal_id_search: str, **kwargs) -> Query:
"""Filter cases with internal ids matching the search pattern."""
return cases.filter(Case.internal_id.like(f"%{internal_id_search}%"))
return cases.filter(Case.internal_id.contains(internal_id_search))


def filter_cases_by_name(cases: Query, name: str, **kwargs) -> Query:
Expand All @@ -68,12 +68,12 @@ def filter_cases_by_name(cases: Query, name: str, **kwargs) -> Query:

def filter_cases_by_name_search(cases: Query, name_search: str, **kwargs) -> Query:
"""Filter cases with names matching the search pattern."""
return cases.filter(Case.name.like(f"%{name_search}%"))
return cases.filter(Case.name.contains(name_search))


def filter_cases_by_workflow_search(cases: Query, workflow_search: str, **kwargs) -> Query:
"""Filter cases with a workflow search pattern."""
return cases.filter(Case.data_analysis.ilike(f"%{workflow_search}%"))
return cases.filter(Case.data_analysis.contains(workflow_search))


def filter_cases_by_priority(cases: Query, priority: str, **kwargs) -> Query:
Expand Down
2 changes: 1 addition & 1 deletion cg/store/filters/status_flow_cell_filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def filter_flow_cell_by_name(flow_cells: Query, flow_cell_name: str, **kwargs) -

def filter_flow_cell_by_name_search(flow_cells: Query, name_search: str, **kwargs) -> Query:
"""Return flow cell by flow cell id enquiry."""
return flow_cells.filter(Flowcell.name.like(f"%{name_search}%"))
return flow_cells.filter(Flowcell.name.contains(name_search))


def filter_flow_cells_with_statuses(
Expand Down
4 changes: 2 additions & 2 deletions cg/store/filters/status_pool_filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,12 @@ def filter_pools_by_customer_id(pools: Query, customer_ids: list[int], **kwargs)

def filter_pools_by_name_enquiry(pools: Query, name_enquiry: str, **kwargs) -> Query:
"""Return pools by name enquiry."""
return pools.filter(Pool.name.like(f"%{name_enquiry}%"))
return pools.filter(Pool.name.contains(name_enquiry))


def filter_pools_by_order_enquiry(pools: Query, order_enquiry: str, **kwargs) -> Query:
"""Return pools by order enquiry."""
return pools.filter(Pool.order.like(f"%{order_enquiry}%"))
return pools.filter(Pool.order.contains(order_enquiry))


def filter_pools_by_entry_id(pools: Query, entry_id: int, **kwargs) -> Query:
Expand Down
6 changes: 3 additions & 3 deletions cg/store/filters/status_sample_filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ def filter_samples_by_internal_id_pattern(
samples: Query, internal_id_pattern: str, **kwargs
) -> Query:
"""Return samples matching the internal id pattern."""
return samples.filter(Sample.internal_id.like(f"%{internal_id_pattern}%"))
return samples.filter(Sample.internal_id.contains(internal_id_pattern))


def filter_samples_by_internal_id_or_name_search(
Expand All @@ -134,8 +134,8 @@ def filter_samples_by_internal_id_or_name_search(
"""Return samples matching the internal id or name search."""
return samples.filter(
or_(
Sample.name.like(f"%{search_pattern}%"),
Sample.internal_id.like(f"%{search_pattern}%"),
Sample.name.contains(search_pattern),
Sample.internal_id.contains(search_pattern),
)
)

Expand Down
10 changes: 10 additions & 0 deletions cg/store/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,14 @@ def to_dict(model_instance):
UniqueConstraint("customer_id", "collaboration_id", name="_customer_collaboration_uc"),
)

# Association table linking orders to cases (many-to-many).
# Mirrors the 2024_02_19_43eb680d6181 alembic migration: both FKs cascade
# on delete and the pair is unique per link.
# NOTE(review): the migration declares index=True on both columns but this
# model does not — confirm whether the model should match.
order_case = Table(
    "order_case",
    Model.metadata,
    Column("order_id", ForeignKey("order.id", ondelete="CASCADE"), nullable=False),
    Column("case_id", ForeignKey("case.id", ondelete="CASCADE"), nullable=False),
    UniqueConstraint("order_id", "case_id", name="_order_case_uc"),
)


class PriorityMixin:
@property
Expand Down Expand Up @@ -403,6 +411,7 @@ class Case(Model, PriorityMixin):

analyses = orm.relationship(Analysis, back_populates="case", order_by="-Analysis.completed_at")
links = orm.relationship("CaseSample", back_populates="case")
orders = orm.relationship("Order", secondary=order_case, back_populates="cases")

@property
def cohorts(self) -> list[str]:
Expand Down Expand Up @@ -882,6 +891,7 @@ class Order(Model):
__tablename__ = "order"

id = Column(types.Integer, primary_key=True, unique=True)
cases = orm.relationship("Case", secondary=order_case, back_populates="orders")
customer_id = Column(ForeignKey("customer.id"), nullable=False)
customer = orm.relationship(Customer, foreign_keys=[customer_id])
order_date = Column(types.DateTime, nullable=False, default=dt.datetime.now())
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "cg"
version = "58.1.9"
version = "59.2.1"
description = "Clinical Genomics command center"
authors = ["Clinical Genomics <[email protected]>"]
readme = "README.md"
Expand Down
5 changes: 3 additions & 2 deletions tests/meta/upload/scout/test_meta_upload_scoutapi_rna.py
Original file line number Diff line number Diff line change
Expand Up @@ -580,7 +580,7 @@ def test_add_rna_sample(
# GIVEN an RNA case and the associated RNA samples
rna_case: Case = rna_store.get_case_by_internal_id(internal_id=rna_case_id)
rna_sample_list: list[Sample] = (
rna_store._get_query(table=Sample).filter(Sample.internal_id.like("rna")).all()
rna_store._get_query(table=Sample).filter(Sample.internal_id.contains("rna")).all()
)

# WHEN running the method to create a list of RNADNACollections
Expand All @@ -590,9 +590,10 @@ def test_add_rna_sample(
)

# THEN the resulting RNADNACollections should contain all RNA samples in the case
assert rna_sample_list
for sample in rna_sample_list:
assert sample.internal_id in [
rna_dna_collection.rna_sample_id for rna_dna_collection in rna_dna_collections
rna_dna_collection.rna_sample_internal_id for rna_dna_collection in rna_dna_collections
]


Expand Down

0 comments on commit 626daa9

Please sign in to comment.