Skip to content

Commit

Permalink
Merge branch 'master' into refactor-sql-deletehandler-and-updatehandler
Browse files Browse the repository at this point in the history
  • Loading branch information
henrikstranneheim authored Jan 11, 2024
2 parents 0691d5a + 571fade commit 4b4cf68
Show file tree
Hide file tree
Showing 58 changed files with 1,657 additions and 1,550 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 54.10.4
current_version = 55.2.2
commit = True
tag = True
tag_name = v{new_version}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
"""remove_uploaded_to_vogue_at
Revision ID: 584840c706a0
Revises: 27ec5c4c0380
Create Date: 2023-12-27 11:50:22.278213
"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = "584840c706a0"
down_revision = "27ec5c4c0380"
branch_labels = None
depends_on = None


def upgrade():
    """Apply the migration: drop the deprecated uploaded_to_vogue_at column."""
    table, column = "analysis", "uploaded_to_vogue_at"
    op.drop_column(table, column)


def downgrade():
    """Revert the migration: re-create uploaded_to_vogue_at as a nullable DateTime."""
    restored_column = sa.Column("uploaded_to_vogue_at", sa.DateTime(), nullable=True)
    op.add_column("analysis", restored_column)
2 changes: 1 addition & 1 deletion cg/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
__title__ = "cg"
__version__ = "54.10.4"
__version__ = "55.2.2"
17 changes: 8 additions & 9 deletions cg/apps/crunchy/crunchy.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,25 +64,24 @@ def is_compression_pending(compression_obj: CompressionData) -> bool:
def is_fastq_compression_possible(compression_obj: CompressionData) -> bool:
    """Check if FASTQ compression is possible.

    Compression is NOT possible when:
        - Compression is already running for this sample
        - The SPRING file already exists
        - The data is external
    Otherwise compression IS possible.

    NOTE: the source diff contained both the pre- and post-commit lines of this
    function interleaved; this is the reconstructed post-commit version
    (DEBUG-level logging, updated docstring).
    """
    if CrunchyAPI.is_compression_pending(compression_obj):
        return False

    if compression_obj.spring_exists():
        LOG.debug("SPRING file found")
        return False

    # External data must never be compressed in place.
    if "external-data" in str(compression_obj.fastq_first):
        LOG.debug("File is external data and should not be compressed")
        return False

    LOG.debug("FASTQ compression is possible")

    return True

Expand Down
1 change: 1 addition & 0 deletions cg/apps/lims/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,7 @@ def _export_sample(self, lims_sample):
else None
),
"comment": udfs.get("comment"),
"concentration_ng_ul": udfs.get("Concentration (ng/ul)"),
}

def get_received_date(self, lims_id: str) -> dt.date:
Expand Down
14 changes: 0 additions & 14 deletions cg/apps/tb/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,20 +91,6 @@ def is_latest_analysis_completed(self, case_id: str) -> bool:
def is_latest_analysis_qc(self, case_id: str) -> bool:
    """Return True when the latest analysis status for the case equals AnalysisStatus.QC."""
    return self.get_latest_analysis_status(case_id=case_id) == AnalysisStatus.QC

def mark_analyses_deleted(self, case_id: str) -> list | None:
    """Mark every analysis of the given case as deleted in Trailblazer.

    Analysis files on disk are left untouched. Returns the validated
    analyses from the Trailblazer response, or None when the response is
    empty or has an unexpected shape.
    """
    response = self.query_trailblazer(
        command="mark-analyses-deleted", request_body={"case_id": case_id}
    )
    if not response:
        return None
    if isinstance(response, dict):
        return [TrailblazerAnalysis.model_validate(response)]
    if isinstance(response, list):
        return [TrailblazerAnalysis.model_validate(item) for item in response]
    return None

def add_pending_analysis(
self,
case_id: str,
Expand Down
17 changes: 9 additions & 8 deletions cg/cli/upload/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
from cg.constants import Pipeline
from cg.exc import AnalysisAlreadyUploadedError
from cg.meta.upload.balsamic.balsamic import BalsamicUploadAPI
from cg.meta.upload.microsalt.microsalt_upload_api import MicrosaltUploadAPI
from cg.meta.upload.mip.mip_dna import MipDNAUploadAPI
from cg.meta.upload.mip.mip_rna import MipRNAUploadAPI
from cg.meta.upload.rnafusion.rnafusion import RnafusionUploadAPI
Expand All @@ -57,29 +58,29 @@ def upload(context: click.Context, case_id: str | None, restart: bool):
"""Upload results from analyses"""

config_object: CGConfig = context.obj
upload_api: UploadAPI = MipDNAUploadAPI(config=config_object) # default upload API
upload_api: UploadAPI = MipDNAUploadAPI(config_object)

LOG.info("----------------- UPLOAD -----------------")

if context.invoked_subcommand is not None:
context.obj.meta_apis["upload_api"] = upload_api
elif case_id: # Provided case ID without a subcommand: upload everything
try:
upload_api.analysis_api.status_db.verify_case_exists(case_internal_id=case_id)
case: Case = upload_api.status_db.get_case_by_internal_id(internal_id=case_id)
upload_api.analysis_api.status_db.verify_case_exists(case_id)
case: Case = upload_api.status_db.get_case_by_internal_id(case_id)
upload_api.verify_analysis_upload(case_obj=case, restart=restart)
except AnalysisAlreadyUploadedError:
# Analysis being uploaded or it has been already uploaded
return

# Update the upload API based on the data analysis type (MIP-DNA by default)
# Upload for balsamic, balsamic-umi and balsamic-qc
if Pipeline.BALSAMIC in case.data_analysis:
upload_api = BalsamicUploadAPI(config=config_object)
upload_api = BalsamicUploadAPI(config_object)
elif case.data_analysis == Pipeline.RNAFUSION:
upload_api = RnafusionUploadAPI(config=config_object)
upload_api = RnafusionUploadAPI(config_object)
elif case.data_analysis == Pipeline.MIP_RNA:
upload_api: UploadAPI = MipRNAUploadAPI(config=config_object)
upload_api = MipRNAUploadAPI(config_object)
elif case.data_analysis == Pipeline.MICROSALT:
upload_api = MicrosaltUploadAPI(config_object)

context.obj.meta_apis["upload_api"] = upload_api
upload_api.upload(ctx=context, case=case, restart=restart)
Expand Down
9 changes: 1 addition & 8 deletions cg/constants/delivery.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,14 +124,7 @@
{"salmon-quant"},
]

MICROSALT_ANALYSIS_CASE_TAGS: list[set[str]] = [
{"microsalt-qc"},
{"microsalt-type"},
{"assembly"},
{"trimmed-forward-reads"},
{"trimmed-reverse-reads"},
{"reference-alignment-deduplicated"},
]
MICROSALT_ANALYSIS_CASE_TAGS = [{"qc-report"}, {"typing-report"}]

MICROSALT_ANALYSIS_SAMPLE_TAGS: list[set[str]] = []

Expand Down
1 change: 1 addition & 0 deletions cg/constants/lims.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
"comment": "Comment",
"control": "Control",
"concentration": "Concentration (nM)",
"concentration_ng_ul": "Concentration (ng/ul)",
"concentration_sample": "Sample Conc.",
"customer": "customer",
"data_analysis": "Data Analysis",
Expand Down
5 changes: 5 additions & 0 deletions cg/constants/scout.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,11 @@ class ScoutExportFileName(StrEnum):
PANELS: str = f"gene_panels{FileExtensions.BED}"


class UploadTrack(StrEnum):
    """Track identifiers used when building Scout upload/load configs."""

    # "rare" is the rare-disease track; "cancer" covers the Balsamic workflows.
    RARE_DISEASE: str = "rare"
    CANCER: str = "cancer"


class ScoutCustomCaseReportTags(StrEnum):
DELIVERY: str = "delivery_report"
CNV: str = "cnv_report"
Expand Down
31 changes: 20 additions & 11 deletions cg/meta/compress/compress.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from cg.constants import SequencingFileTag
from cg.meta.backup.backup import SpringBackupAPI
from cg.meta.compress import files
from cg.models import CompressionData, FileData
from cg.models import CompressionData
from cg.store.models import Sample

LOG = logging.getLogger(__name__)
Expand Down Expand Up @@ -73,16 +73,11 @@ def compress_fastq(self, sample_id: str) -> bool:
for run_name in sample_fastq:
LOG.info(f"Check if compression possible for run {run_name}")
compression: CompressionData = sample_fastq[run_name]["compression_data"]
if FileData.is_empty(compression.fastq_first):
LOG.warning(f"Fastq files are empty for {sample_id}: {compression.fastq_first}")
self.delete_fastq_housekeeper(
hk_fastq_first=sample_fastq[run_name]["hk_first"],
hk_fastq_second=sample_fastq[run_name]["hk_second"],
)
all_ok = False
continue

if not self.crunchy_api.is_fastq_compression_possible(compression_obj=compression):
is_compression_possible: bool = self._is_fastq_compression_possible(
compression=compression,
sample_id=sample_id,
)
if not is_compression_possible:
LOG.warning(f"FASTQ to SPRING not possible for {sample_id}, run {run_name}")
all_ok = False
continue
Expand All @@ -93,6 +88,20 @@ def compress_fastq(self, sample_id: str) -> bool:
self.crunchy_api.fastq_to_spring(compression_obj=compression, sample_id=sample_id)
return all_ok

def _is_fastq_compression_possible(self, compression: CompressionData, sample_id: str) -> bool:
    """Return True when FASTQ-to-SPRING compression may run for this sample.

    An already-archived Spring file rules compression out; otherwise the
    decision is delegated to the crunchy API.
    """
    if not self._is_spring_archived(compression):
        return self.crunchy_api.is_fastq_compression_possible(compression_obj=compression)
    LOG.debug(f"Found archived Spring file for {sample_id} - compression not possible")
    return False

def _is_spring_archived(self, compression_data: CompressionData) -> bool:
    """Return True only when the Spring file is known to Housekeeper, has an
    archive record, and that record carries an archived_at timestamp."""
    spring_file: File | None = self.hk_api.get_file_insensitive_path(
        path=compression_data.spring_path
    )
    if spring_file and spring_file.archive:
        return bool(spring_file.archive.archived_at)
    return False

def decompress_spring(self, sample_id: str) -> bool:
"""Decompress SPRING archive for a sample.
Expand Down
2 changes: 1 addition & 1 deletion cg/meta/orders/case_submitter.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from cg.meta.orders.submitter import Submitter
from cg.models.orders.order import OrderIn
from cg.models.orders.samples import Of1508Sample, OrderInSample
from cg.store.models import ApplicationVersion, Customer, Case, CaseSample, Sample
from cg.store.models import ApplicationVersion, Case, CaseSample, Customer, Sample

LOG = logging.getLogger(__name__)

Expand Down
4 changes: 2 additions & 2 deletions cg/meta/report/balsamic.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,10 +156,10 @@ def get_variant_caller_version(var_caller_name: str, var_caller_versions: dict)
return versions[0]
return None

def get_report_accreditation(
def is_report_accredited(
self, samples: list[SampleModel], analysis_metadata: BalsamicAnalysis
) -> bool:
"""Checks if the report is accredited or not."""
"""Check if the Balsamic report is accredited."""
if analysis_metadata.config.analysis.sequencing_type == "targeted" and next(
(
panel
Expand Down
4 changes: 2 additions & 2 deletions cg/meta/report/mip_dna.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,10 +79,10 @@ def get_genome_build(self, analysis_metadata: MipAnalysis) -> str:
"""Return build version of the genome reference of a specific case."""
return analysis_metadata.genome_build

def get_report_accreditation(
def is_report_accredited(
self, samples: list[SampleModel], analysis_metadata: MipAnalysis = None
) -> bool:
"""Checks if the report is accredited or not by evaluating each of the sample process accreditations."""
"""Check if the MIP-DNA report is accredited by evaluating each of the sample process accreditations."""
for sample in samples:
if not sample.application.accredited:
return False
Expand Down
6 changes: 3 additions & 3 deletions cg/meta/report/report_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ def get_report_data(self, case_id: str, analysis_date: datetime) -> ReportModel:
version=self.get_report_version(analysis=analysis),
date=datetime.today(),
case=case_model,
accredited=self.get_report_accreditation(
accredited=self.is_report_accredited(
samples=case_model.samples, analysis_metadata=analysis_metadata
),
)
Expand Down Expand Up @@ -407,10 +407,10 @@ def get_variant_callers(self, _analysis_metadata: AnalysisModel) -> list:
"""Return list of variant-calling filters used during analysis."""
return []

def is_report_accredited(
    self, samples: list[SampleModel], analysis_metadata: AnalysisModel
) -> bool:
    """Check if the report is accredited.

    Abstract hook: each workflow-specific report API must override this.
    (The source diff showed the old `get_report_accreditation` signature and
    docstring interleaved with this renamed version; this is the clean
    post-commit definition.)

    Raises:
        NotImplementedError: always, in this base implementation.
    """
    raise NotImplementedError

def get_required_fields(self, case: CaseModel) -> dict:
Expand Down
4 changes: 2 additions & 2 deletions cg/meta/report/rnafusion.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,10 +79,10 @@ def get_genome_build(self, analysis_metadata: AnalysisModel) -> str:
"""Return build version of the genome reference of a specific case."""
return GenomeVersion.hg38.value

def is_report_accredited(
    self, samples: list[SampleModel], analysis_metadata: AnalysisModel
) -> bool:
    """Check if the report is accredited. Rnafusion is an accredited workflow.

    (Reconstructed post-commit version; the source diff interleaved the old
    `get_report_accreditation` name and docstring with this one.)
    """
    # Accreditation is unconditional for Rnafusion; samples/metadata are ignored.
    return True

def get_template_name(self) -> str:
Expand Down
25 changes: 25 additions & 0 deletions cg/meta/upload/microsalt/microsalt_upload_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import logging

import click

from cg.cli.upload.clinical_delivery import upload_clinical_delivery
from cg.meta.upload.upload_api import UploadAPI
from cg.meta.workflow.microsalt.microsalt import MicrosaltAnalysisAPI
from cg.models.cg_config import CGConfig
from cg.store.models import Analysis, Case

LOG = logging.getLogger(__name__)


class MicrosaltUploadAPI(UploadAPI):
    """Upload API for the microSALT workflow."""

    def __init__(self, config: CGConfig):
        # Wire up the microSALT analysis API before delegating to the base class.
        self.analysis_api = MicrosaltAnalysisAPI(config)
        super().__init__(config=config, analysis_api=self.analysis_api)

    def upload(self, ctx: click.Context, case: Case, restart: bool) -> None:
        """Upload microSALT analysis data and files for the case.

        The original docstring said "MIP-DNA"; this class handles microSALT.
        `restart` is accepted for interface parity but unused here — TODO confirm.
        """
        # NOTE(review): assumes case.analyses[0] is the latest analysis — confirm ordering.
        analysis: Analysis = case.analyses[0]
        self.update_upload_started_at(analysis)

        ctx.invoke(upload_clinical_delivery, case_id=case.internal_id)
        self.update_uploaded_at(analysis=analysis)
9 changes: 6 additions & 3 deletions cg/meta/upload/scout/balsamic_config_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@

from cg.apps.lims import LimsAPI
from cg.constants.constants import SampleType
from cg.constants.scout import BALSAMIC_CASE_TAGS, BALSAMIC_SAMPLE_TAGS
from cg.constants.housekeeper_tags import HK_DELIVERY_REPORT_TAG
from cg.constants.scout import BALSAMIC_CASE_TAGS, BALSAMIC_SAMPLE_TAGS, UploadTrack
from cg.constants.subject import PhenotypeStatus
from cg.meta.upload.scout.hk_tags import CaseTags, SampleTags
from cg.meta.upload.scout.scout_config_builder import ScoutConfigBuilder
Expand All @@ -22,7 +23,10 @@ def __init__(self, hk_version_obj: Version, analysis_obj: Analysis, lims_api: Li
)
self.case_tags: CaseTags = CaseTags(**BALSAMIC_CASE_TAGS)
self.sample_tags: SampleTags = SampleTags(**BALSAMIC_SAMPLE_TAGS)
self.load_config: BalsamicLoadConfig = BalsamicLoadConfig(track="cancer")
self.load_config: BalsamicLoadConfig = BalsamicLoadConfig(
track=UploadTrack.CANCER.value,
delivery_report=self.get_file_from_hk({HK_DELIVERY_REPORT_TAG}),
)

def include_case_files(self):
LOG.info("Including BALSAMIC specific case level files")
Expand All @@ -34,7 +38,6 @@ def include_case_files(self):
)
self.include_cnv_report()
self.include_multiqc_report()
self.include_delivery_report()

def include_sample_files(self, config_sample: ScoutCancerIndividual) -> None:
LOG.info("Including BALSAMIC specific sample level files.")
Expand Down
16 changes: 8 additions & 8 deletions cg/meta/upload/scout/balsamic_umi_config_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,11 @@
from housekeeper.store.models import Version

from cg.apps.lims import LimsAPI
from cg.constants.scout import BALSAMIC_UMI_CASE_TAGS, BALSAMIC_UMI_SAMPLE_TAGS
from cg.constants.housekeeper_tags import HK_DELIVERY_REPORT_TAG
from cg.constants.scout import BALSAMIC_UMI_CASE_TAGS, BALSAMIC_UMI_SAMPLE_TAGS, UploadTrack
from cg.meta.upload.scout.balsamic_config_builder import BalsamicConfigBuilder
from cg.meta.upload.scout.hk_tags import CaseTags, SampleTags
from cg.models.scout.scout_load_config import (
BalsamicUmiLoadConfig,
ScoutCancerIndividual,
)
from cg.models.scout.scout_load_config import BalsamicUmiLoadConfig, ScoutCancerIndividual
from cg.store.models import Analysis, Sample

LOG = logging.getLogger(__name__)
Expand All @@ -22,12 +20,14 @@ def __init__(self, hk_version_obj: Version, analysis_obj: Analysis, lims_api: Li
)
self.case_tags: CaseTags = CaseTags(**BALSAMIC_UMI_CASE_TAGS)
self.sample_tags: SampleTags = SampleTags(**BALSAMIC_UMI_SAMPLE_TAGS)
self.load_config: BalsamicUmiLoadConfig = BalsamicUmiLoadConfig(track="cancer")
self.load_config: BalsamicUmiLoadConfig = BalsamicUmiLoadConfig(
track=UploadTrack.CANCER.value,
delivery_report=self.get_file_from_hk({HK_DELIVERY_REPORT_TAG}),
)

def include_sample_files(self, config_sample: ScoutCancerIndividual) -> None:
    """Hook for sample-level files; the UMI builder adds none and only logs the call."""
    LOG.info("Including BALSAMIC specific sample level files")

def get_balsamic_analysis_type(self, sample: Sample) -> str:
    """Return the formatted balsamic analysis type.

    Always "panel-umi" for the UMI config builder; `sample` is ignored.
    (The source diff carried both the old and new docstring lines; this is
    the clean post-commit definition.)
    """
    return "panel-umi"
Loading

0 comments on commit 4b4cf68

Please sign in to comment.