Commit

Merge branch 'master' into automatic-delivery-of-microsalt

seallard authored Jan 11, 2024
2 parents 4004edf + 2172cdc commit 0620f4f
Showing 71 changed files with 1,767 additions and 1,598 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 54.9.1
+current_version = 55.2.0
 commit = True
 tag = True
 tag_name = v{new_version}
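The [bumpversion] block above is what produces the version changes in this diff: a bump rewrites current_version in every tracked file, then commits and tags the result as v{new_version}. A minimal sketch of that substitution step in Python, assuming only the config shown here (the real tool also parses version parts):

def bump(text: str, old_version: str, new_version: str) -> str:
    """Replace the pinned version string, as bumpversion does per tracked file."""
    return text.replace(old_version, new_version)

cfg_text = "[bumpversion]\ncurrent_version = 54.9.1\n"
print(bump(cfg_text, "54.9.1", "55.2.0"))
# With commit = True and tag = True, the tool would then commit and tag v55.2.0.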
@@ -0,0 +1,24 @@
"""remove_uploaded_to_vogue_at
Revision ID: 584840c706a0
Revises: 27ec5c4c0380
Create Date: 2023-12-27 11:50:22.278213
"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = "584840c706a0"
down_revision = "27ec5c4c0380"
branch_labels = None
depends_on = None


def upgrade():
op.drop_column("analysis", "uploaded_to_vogue_at")


def downgrade():
op.add_column("analysis", sa.Column("uploaded_to_vogue_at", sa.DateTime(), nullable=True))
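The new migration drops analysis.uploaded_to_vogue_at on upgrade and restores it as a nullable DateTime on downgrade. A minimal sketch of exercising it through Alembic's command API, assuming an alembic.ini pointing at the cg database (the config path is hypothetical):

from alembic import command
from alembic.config import Config

config = Config("alembic.ini")  # assumed location of the Alembic config
command.upgrade(config, "584840c706a0")    # apply: drops analysis.uploaded_to_vogue_at
command.downgrade(config, "27ec5c4c0380")  # revert: re-adds the nullable DateTime column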
2 changes: 1 addition & 1 deletion cg/__init__.py
@@ -1,2 +1,2 @@
 __title__ = "cg"
-__version__ = "54.9.1"
+__version__ = "55.2.0"
17 changes: 8 additions & 9 deletions cg/apps/crunchy/crunchy.py
@@ -64,25 +64,24 @@ def is_compression_pending(compression_obj: CompressionData) -> bool:
     def is_fastq_compression_possible(compression_obj: CompressionData) -> bool:
         """Check if FASTQ compression is possible.
         There are three possible answers to this question:
-        - Compression is running -> Compression NOT possible
-        - SPRING archive exists -> Compression NOT possible
-        - Data is external -> Compression NOT possible
-        - Not compressed and not running -> Compression IS possible
+        - Compression is running -> Compression NOT possible
+        - SPRING file exists on Hasta -> Compression NOT possible
+        - Data is external -> Compression NOT possible
+        - Not compressed and
+          not running -> Compression IS possible
         """
         if CrunchyAPI.is_compression_pending(compression_obj):
             return False

         if compression_obj.spring_exists():
-            LOG.info("SPRING file found")
+            LOG.debug("SPRING file found")
             return False

         if "external-data" in str(compression_obj.fastq_first):
-            LOG.info("File is external data and should not be compressed")
+            LOG.debug("File is external data and should not be compressed")
             return False

-        LOG.info("FASTQ compression is possible")
+        LOG.debug("FASTQ compression is possible")

         return True
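The docstring above enumerates the blocking conditions in priority order. A self-contained sketch of the same decision ladder, with plain booleans standing in for the CompressionData checks:

def fastq_compression_possible(pending: bool, spring_exists: bool, external: bool) -> bool:
    """Any blocking condition wins; otherwise compression is possible."""
    if pending:        # compression already running
        return False
    if spring_exists:  # SPRING file already exists on Hasta
        return False
    if external:       # external data should not be compressed
        return False
    return True

assert fastq_compression_possible(False, False, False)
assert not fastq_compression_possible(False, True, False)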
1 change: 1 addition & 0 deletions cg/apps/lims/api.py
@@ -90,6 +90,7 @@ def _export_sample(self, lims_sample):
                 else None
             ),
             "comment": udfs.get("comment"),
+            "concentration_ng_ul": udfs.get("Concentration (ng/ul)"),
         }

     def get_received_date(self, lims_id: str) -> dt.date:
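The new key mirrors the LIMS UDF label added in cg/constants/lims.py further down. A small sketch of the lookup, with a plain dict standing in for the sample's UDF mapping:

udfs = {"comment": "ok", "Concentration (ng/ul)": 25.4}  # stand-in for LIMS UDFs

exported = {
    "comment": udfs.get("comment"),
    "concentration_ng_ul": udfs.get("Concentration (ng/ul)"),  # the new field
}
assert exported["concentration_ng_ul"] == 25.4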
14 changes: 0 additions & 14 deletions cg/apps/tb/api.py
@@ -91,20 +91,6 @@ def is_latest_analysis_completed(self, case_id: str) -> bool:
     def is_latest_analysis_qc(self, case_id: str) -> bool:
         return self.get_latest_analysis_status(case_id=case_id) == AnalysisStatus.QC

-    def mark_analyses_deleted(self, case_id: str) -> list | None:
-        """Mark all analyses for case deleted without removing analysis files"""
-        request_body = {
-            "case_id": case_id,
-        }
-        response = self.query_trailblazer(
-            command="mark-analyses-deleted", request_body=request_body
-        )
-        if response:
-            if isinstance(response, list):
-                return [TrailblazerAnalysis.model_validate(analysis) for analysis in response]
-            if isinstance(response, dict):
-                return [TrailblazerAnalysis.model_validate(response)]
-
     def add_pending_analysis(
         self,
         case_id: str,
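The deleted method normalised a dict-or-list Trailblazer response into a list of validated models. The pattern itself, sketched with a stand-in Pydantic model (TrailblazerAnalysis is the real one):

from pydantic import BaseModel

class AnalysisStub(BaseModel):  # stand-in for TrailblazerAnalysis
    case_id: str

def normalise(response: list | dict) -> list[AnalysisStub] | None:
    if isinstance(response, list):
        return [AnalysisStub.model_validate(item) for item in response]
    if isinstance(response, dict):
        return [AnalysisStub.model_validate(response)]

assert normalise({"case_id": "case1"})[0].case_id == "case1"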
3 changes: 2 additions & 1 deletion cg/cli/demultiplex/demux.py
@@ -59,7 +59,8 @@ def demultiplex_all(context: CGConfig, flow_cells_directory: click.Path, dry_run

         if not flow_cell.validate_sample_sheet():
             LOG.warning(
-                f"Malformed sample sheet. Run cg demultiplex samplesheet validate {flow_cell.sample_sheet_path}",
+                "Malformed sample sheet. "
+                f"Run cg demultiplex sample sheet validate {flow_cell.sample_sheet_path}",
             )
             continue
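The rewrite relies on Python's implicit concatenation of adjacent string literals, so the warning is still emitted as a single message. A tiny sketch (the path is invented):

sample_sheet_path = "/runs/FC01/SampleSheet.csv"  # hypothetical path
message = (
    "Malformed sample sheet. "
    f"Run cg demultiplex sample sheet validate {sample_sheet_path}"
)
assert message == (
    "Malformed sample sheet. Run cg demultiplex sample sheet validate /runs/FC01/SampleSheet.csv"
)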
1 change: 1 addition & 0 deletions cg/constants/gene_panel.py
@@ -32,6 +32,7 @@ class GenePanelMasterList(StrEnum):
     NEURODEG: str = "NEURODEG"
     NMD: str = "NMD"
     OMIM_AUTO: str = "OMIM-AUTO"
+    OPTIC: str = "OPTIC"
     PANELAPP_GREEN: str = "PANELAPP-GREEN"
     PEDHEP: str = "PEDHEP"
     PID: str = "PID"
1 change: 1 addition & 0 deletions cg/constants/lims.py
@@ -12,6 +12,7 @@
     "comment": "Comment",
     "control": "Control",
     "concentration": "Concentration (nM)",
+    "concentration_ng_ul": "Concentration (ng/ul)",
     "concentration_sample": "Sample Conc.",
     "customer": "customer",
     "data_analysis": "Data Analysis",
5 changes: 5 additions & 0 deletions cg/constants/scout.py
@@ -14,6 +14,11 @@ class ScoutExportFileName(StrEnum):
     PANELS: str = f"gene_panels{FileExtensions.BED}"


+class UploadTrack(StrEnum):
+    RARE_DISEASE: str = "rare"
+    CANCER: str = "cancer"
+
+
 class ScoutCustomCaseReportTags(StrEnum):
     DELIVERY: str = "delivery_report"
     CNV: str = "cnv_report"
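Because UploadTrack subclasses StrEnum, its members are plain strings, so they can be passed straight into Scout payloads or compared against raw config values. A quick sketch:

from enum import StrEnum  # Python 3.11+

class UploadTrack(StrEnum):
    RARE_DISEASE = "rare"
    CANCER = "cancer"

assert UploadTrack.CANCER == "cancer"
assert f"track={UploadTrack.RARE_DISEASE}" == "track=rare"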
52 changes: 32 additions & 20 deletions cg/meta/archive/archive.py
@@ -40,11 +40,11 @@ def __init__(
         self.status_db: Store = status_db
         self.data_flow_config: DataFlowConfig = data_flow_config

-    def archive_files_to_location(
-        self, files_and_samples: list[FileAndSample], archive_location: ArchiveLocations
+    def archive_file_to_location(
+        self, file_and_sample: FileAndSample, archive_location: ArchiveLocations
     ) -> int:
         archive_handler: ArchiveHandler = ARCHIVE_HANDLERS[archive_location](self.data_flow_config)
-        return archive_handler.archive_files(files_and_samples=files_and_samples)
+        return archive_handler.archive_file(file_and_sample=file_and_sample)

     def archive_spring_files_and_add_archives_to_housekeeper(
         self, spring_file_count_limit: int | None
@@ -55,25 +55,37 @@ def archive_spring_files_and_add_archives_to_housekeeper(
             LOG.warning("Please do not provide a non-positive integer as limit - exiting.")
             return
         for archive_location in ArchiveLocations:
-            files_to_archive: list[File] = self.housekeeper_api.get_non_archived_spring_files(
-                tags=[archive_location],
-                limit=spring_file_count_limit,
+            self.archive_files_to_location(
+                archive_location=archive_location, file_limit=spring_file_count_limit
             )
-            if files_to_archive:
-                files_and_samples_for_location = self.add_samples_to_files(files_to_archive)
-                job_id = self.archive_files_to_location(
-                    files_and_samples=files_and_samples_for_location,
-                    archive_location=archive_location,
-                )
-                LOG.info(f"Files submitted to {archive_location} with archival task id {job_id}.")
-                self.housekeeper_api.add_archives(
-                    files=[
-                        file_and_sample.file for file_and_sample in files_and_samples_for_location
-                    ],
-                    archive_task_id=job_id,
-                )
-            else:
-                LOG.info(f"No files to archive for location {archive_location}.")
+
+    def archive_files_to_location(self, archive_location: str, file_limit: int | None) -> None:
+        """Archives up to spring file count limit number of files to the provided archive location."""
+        files_to_archive: list[File] = self.housekeeper_api.get_non_archived_spring_files(
+            tags=[archive_location],
+            limit=file_limit,
+        )
+        if files_to_archive:
+            files_and_samples_for_location = self.add_samples_to_files(files_to_archive)
+            for file_and_sample in files_and_samples_for_location:
+                self.archive_file(
+                    file_and_sample=file_and_sample, archive_location=archive_location
+                )
+        else:
+            LOG.info(f"No files to archive for location {archive_location}.")
+
+    def archive_file(
+        self, file_and_sample: FileAndSample, archive_location: ArchiveLocations
+    ) -> None:
+        job_id: int = self.archive_file_to_location(
+            file_and_sample=file_and_sample, archive_location=archive_location
+        )
+        LOG.info(f"File submitted to {archive_location} with archival task id {job_id}.")
+        self.housekeeper_api.add_archives(
+            files=[file_and_sample.file],
+            archive_task_id=job_id,
+        )

     def retrieve_case(self, case_id: str) -> None:
         """Submits jobs to retrieve any archived files belonging to the given case, and updates the Archive entries
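The net effect of the refactor: instead of one Miria job per batch, each file is now submitted and registered individually, which is what lets per-sample metadata ride along. A condensed sketch of the new call chain with stand-in types (not the real classes):

from dataclasses import dataclass

@dataclass
class FileAndSampleStub:  # stand-in for FileAndSample
    path: str

def archive_file_to_location(item: FileAndSampleStub, location: str) -> int:
    return 42  # pretend Miria job id

def archive_file(item: FileAndSampleStub, location: str) -> None:
    job_id = archive_file_to_location(item, location)
    print(f"File submitted to {location} with archival task id {job_id}.")
    # ...then the real code registers it: add_archives(files=[...], archive_task_id=job_id)

def archive_files_to_location(items: list[FileAndSampleStub], location: str) -> None:
    if not items:
        print(f"No files to archive for location {location}.")
        return
    for item in items:  # one job per file, not one job per batch
        archive_file(item, location)

archive_files_to_location([FileAndSampleStub("sample.spring")], "miria")  # invented values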
12 changes: 12 additions & 0 deletions cg/meta/archive/ddn/constants.py
@@ -49,3 +49,15 @@ class JobStatus(StrEnum):
     JobStatus.ON_VALIDATION,
     JobStatus.RUNNING,
 ]
+
+METADATA_LIST = "metadataList"
+
+
+class MetadataFields(StrEnum):
+    CUSTOMER_NAME: str = "customer_name"
+    PREP_CATEGORY: str = "prep_category"
+    SAMPLE_NAME: str = "sample_name"
+    SEQUENCED_AT: str = "sequenced_at"
+    TICKET_NUMBER: str = "ticket_number"
+    NAME = "metadataName"
+    VALUE = "value"
29 changes: 15 additions & 14 deletions cg/meta/archive/ddn/ddn_data_flow_client.py
@@ -27,6 +27,7 @@
     RefreshPayload,
     TransferPayload,
 )
+from cg.meta.archive.ddn.utils import get_metadata
 from cg.meta.archive.models import ArchiveHandler, FileAndSample
 from cg.models.cg_config import DataFlowConfig
@@ -100,14 +101,15 @@ def auth_header(self) -> dict[str, str]:
             self._refresh_auth_token()
         return {"Authorization": f"Bearer {self.auth_token}"}

-    def archive_files(self, files_and_samples: list[FileAndSample]) -> int:
+    def archive_file(self, file_and_sample: FileAndSample) -> int:
         """Archives all files provided, to their corresponding destination, as given by sources
         and destination in TransferData. Returns the job ID of the archiving task."""
         miria_file_data: list[MiriaObject] = self.convert_into_transfer_data(
-            files_and_samples, is_archiving=True
+            [file_and_sample], is_archiving=True
         )
+        metadata: list[dict] = get_metadata(file_and_sample.sample)
         archival_request: TransferPayload = self.create_transfer_request(
-            miria_file_data=miria_file_data, is_archiving_request=True
+            miria_file_data=miria_file_data, is_archiving_request=True, metadata=metadata
         )
         return archival_request.post_request(
             headers=dict(self.headers, **self.auth_header),
@@ -117,15 +119,9 @@ def archive_files(self, files_and_samples: list[FileAndSample]) -> int:
     def retrieve_files(self, files_and_samples: list[FileAndSample]) -> int:
         """Retrieves the provided files and stores them in the corresponding sample bundle in
         Housekeeper."""
-        miria_file_data: list[MiriaObject] = []
-        for file_and_sample in files_and_samples:
-            LOG.info(
-                f"Will retrieve file {file_and_sample.file.path} for sample {file_and_sample.sample.internal_id} via Miria."
-            )
-            miria_object: MiriaObject = MiriaObject.create_from_file_and_sample(
-                file=file_and_sample.file, sample=file_and_sample.sample, is_archiving=False
-            )
-            miria_file_data.append(miria_object)
+        miria_file_data: list[MiriaObject] = self.convert_into_transfer_data(
+            files_and_samples=files_and_samples, is_archiving=False
+        )
         retrieval_request: TransferPayload = self.create_transfer_request(
             miria_file_data=miria_file_data, is_archiving_request=False
         )
@@ -135,7 +131,10 @@
         ).job_id

     def create_transfer_request(
-        self, miria_file_data: list[MiriaObject], is_archiving_request: bool
+        self,
+        miria_file_data: list[MiriaObject],
+        is_archiving_request: bool,
+        metadata: list[dict] = [],
     ) -> TransferPayload:
         """Performs the necessary curation of paths for the request to be valid, depending on if
         it is an archiving or a retrieve request.
@@ -151,7 +150,9 @@ def create_transfer_request(
         )

         transfer_request = TransferPayload(
-            files_to_transfer=miria_file_data, createFolder=is_archiving_request
+            files_to_transfer=miria_file_data,
+            createFolder=is_archiving_request,
+            metadataList=metadata,
         )
         transfer_request.trim_paths(attribute_to_trim=attribute)
         transfer_request.add_repositories(
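Put together, an archiving request body now carries the per-sample metadata instead of an always-empty list. An illustrative payload under the TransferPayload defaults shown in models.py below (all values invented; the OSTYPE value is assumed):

payload = {
    "osType": "Unix/MacOS",  # assumed value of the OSTYPE constant
    "createFolder": True,    # True for archiving requests
    "settings": [],
    "metadataList": [
        {"metadataName": "customer_name", "value": "cust001"},
        {"metadataName": "ticket_number", "value": "123456"},
    ],
    "pathInfo": [
        {"source": "path/to/sample.spring", "destination": "archive/sample"}  # invented paths
    ],
}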
3 changes: 1 addition & 2 deletions cg/meta/archive/ddn/models.py
@@ -21,7 +21,6 @@ def get_request_log(headers: dict, body: dict):
 class MiriaObject(FileTransferData):
     """Model for representing a singular object transfer."""

-    _metadata = None
     destination: str
     source: str

@@ -58,6 +57,7 @@ class TransferPayload(BaseModel):
     osType: str = OSTYPE
     createFolder: bool = True
     settings: list[dict] = []
+    metadataList: list[dict] = []

     def trim_paths(self, attribute_to_trim: str):
         """Trims the source path from its root directory for all objects in the transfer."""
@@ -76,7 +76,6 @@ def model_dump(self, **kwargs) -> dict:
         """Creates a correctly structured dict to be used as the request payload."""
         payload: dict = super().model_dump(exclude={"files_to_transfer"})
         payload["pathInfo"] = [miria_file.model_dump() for miria_file in self.files_to_transfer]
-        payload["metadataList"] = []
         return payload

     def post_request(self, url: str, headers: dict) -> "TransferResponse":
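The removed line had pinned metadataList to an empty list after serialization; with the new field, super().model_dump() now emits whatever metadata the payload was built with. The override pattern, sketched with stand-in Pydantic v2 models:

from pydantic import BaseModel

class ItemStub(BaseModel):  # stand-in for MiriaObject
    source: str
    destination: str

class PayloadStub(BaseModel):  # stand-in for TransferPayload
    createFolder: bool = True
    metadataList: list[dict] = []
    files_to_transfer: list[ItemStub] = []

    def model_dump(self, **kwargs) -> dict:
        # Exclude the typed file list, then re-insert it under the wire-format key.
        payload = super().model_dump(exclude={"files_to_transfer"})
        payload["pathInfo"] = [item.model_dump() for item in self.files_to_transfer]
        return payload

dumped = PayloadStub(files_to_transfer=[ItemStub(source="a", destination="b")]).model_dump()
assert dumped["pathInfo"] == [{"source": "a", "destination": "b"}]
assert dumped["metadataList"] == []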
29 changes: 29 additions & 0 deletions cg/meta/archive/ddn/utils.py
@@ -0,0 +1,29 @@
+from cg.meta.archive.ddn.constants import MetadataFields
+from cg.store.models import Sample
+
+
+def get_metadata(sample: Sample) -> list[dict]:
+    """Returns metadata generated from a sample."""
+    metadata: list[dict] = [
+        {
+            MetadataFields.NAME.value: MetadataFields.CUSTOMER_NAME.value,
+            MetadataFields.VALUE.value: sample.customer.name,
+        },
+        {
+            MetadataFields.NAME.value: MetadataFields.PREP_CATEGORY.value,
+            MetadataFields.VALUE.value: sample.prep_category,
+        },
+        {
+            MetadataFields.NAME.value: MetadataFields.SAMPLE_NAME.value,
+            MetadataFields.VALUE.value: sample.name,
+        },
+        {
+            MetadataFields.NAME.value: MetadataFields.SEQUENCED_AT.value,
+            MetadataFields.VALUE.value: sample.last_sequenced_at,
+        },
+        {
+            MetadataFields.NAME.value: MetadataFields.TICKET_NUMBER.value,
+            MetadataFields.VALUE.value: sample.original_ticket,
+        },
+    ]
+    return metadata
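A hedged usage sketch of get_metadata: a SimpleNamespace stands in for the ORM Sample, since the function only reads attributes. All field values are invented.

from types import SimpleNamespace

sample = SimpleNamespace(
    customer=SimpleNamespace(name="cust001"),
    prep_category="wgs",
    name="sample1",
    last_sequenced_at="2024-01-11",
    original_ticket="123456",
)
# get_metadata(sample) would return entries shaped like:
#   {"metadataName": "customer_name", "value": "cust001"}
#   {"metadataName": "prep_category", "value": "wgs"}
#   ...one per metadata name, paired under the Miria NAME/VALUE keys.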
2 changes: 1 addition & 1 deletion cg/meta/archive/models.py
@@ -35,7 +35,7 @@ def __init__(self, config: DataFlowConfig):
         pass

     @abstractmethod
-    def archive_files(self, files_and_samples: list[FileAndSample]):
+    def archive_file(self, file_and_sample: FileAndSample):
         """Archives all folders provided, to their corresponding destination,
         as given by sources and destination parameter."""
         pass
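The signature change in the ABC means every concrete handler (the DDN client above, and any future archive location) now implements a per-file hook. A minimal conforming subclass, purely illustrative:

from abc import ABC, abstractmethod

class ArchiveHandlerStub(ABC):  # stand-in for cg.meta.archive.models.ArchiveHandler
    @abstractmethod
    def archive_file(self, file_and_sample) -> int:
        """Submit one file for archiving and return the job id."""

class NullHandler(ArchiveHandlerStub):
    def archive_file(self, file_and_sample) -> int:
        return 0  # pretend job id

assert NullHandler().archive_file(object()) == 0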