Skip to content

Commit

Permalink
Rename family relationships to case (#2666) (patch)
Browse files Browse the repository at this point in the history
  • Loading branch information
seallard authored Nov 7, 2023
1 parent c930bd0 commit 0c31ec5
Show file tree
Hide file tree
Showing 61 changed files with 173 additions and 184 deletions.
4 changes: 2 additions & 2 deletions cg/cli/clean.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,7 @@ def hk_bundle_files(
size_cleaned: int = 0
for analysis in analyses:
LOG.info(f"Cleaning analysis {analysis}")
bundle_name: str = analysis.family.internal_id
bundle_name: str = analysis.case.internal_id
hk_bundle_version: Optional[Version] = housekeeper_api.version(
bundle=bundle_name, date=analysis.started_at
)
Expand All @@ -230,7 +230,7 @@ def hk_bundle_files(
f"date {analysis.started_at}"
)
version_files: list[File] = housekeeper_api.get_files(
bundle=analysis.family.internal_id, tags=tags, version=hk_bundle_version.id
bundle=analysis.case.internal_id, tags=tags, version=hk_bundle_version.id
).all()
for version_file in version_files:
file_path: Path = Path(version_file.full_path)
Expand Down
2 changes: 1 addition & 1 deletion cg/cli/delete/case.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ def _log_sample_process_information(sample: Sample):

def _log_sample_links(sample: Sample):
for sample_link in sample.links:
LOG.info(f"Sample is linked to: {sample_link.family.internal_id}")
LOG.info(f"Sample is linked to: {sample_link.case.internal_id}")
for sample_link in sample.mother_links:
LOG.info(f"Sample is linked as mother to: {sample_link.mother.internal_id}")
for sample_link in sample.father_links:
Expand Down
2 changes: 1 addition & 1 deletion cg/cli/delete/cases.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def _get_cases(identifiers: click.Tuple([str, str]), store: Store) -> [Case]:
_cases = set()
for sample in samples_by_id:
for link in sample.links:
_cases.add(link.family)
_cases.add(link.case)

return _cases

Expand Down
4 changes: 1 addition & 3 deletions cg/cli/get.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,9 +61,7 @@ def get_sample(context: click.Context, cases: bool, hide_flow_cell: bool, sample
]
click.echo(tabulate([row], headers=SAMPLE_HEADERS, tablefmt="psql"))
if cases:
case_ids: list[str] = [
link_obj.family.internal_id for link_obj in existing_sample.links
]
case_ids: list[str] = [link_obj.case.internal_id for link_obj in existing_sample.links]
context.invoke(get_case, case_ids=case_ids, samples=False)
if not hide_flow_cell:
for sample_flow_cell in existing_sample.flowcells:
Expand Down
2 changes: 1 addition & 1 deletion cg/cli/set/cases.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _get_cases(identifiers: click.Tuple([str, str]), store: Store) -> list[Case]
cases: set[Case] = set()
for sample in samples_by_id:
for link in sample.links:
cases.add(link.family)
cases.add(link.case)

return list(cases)

Expand Down
8 changes: 4 additions & 4 deletions cg/cli/upload/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,14 +102,14 @@ def upload_all_completed_analyses(context: click.Context, pipeline: Pipeline = N

exit_code = 0
for analysis_obj in status_db.get_analyses_to_upload(pipeline=pipeline):
if analysis_obj.family.analyses[0].uploaded_at is not None:
if analysis_obj.case.analyses[0].uploaded_at is not None:
LOG.warning(
f"Skipping upload for case {analysis_obj.family.internal_id}. "
f"It has been already uploaded at {analysis_obj.family.analyses[0].uploaded_at}."
f"Skipping upload for case {analysis_obj.case.internal_id}. "
f"It has been already uploaded at {analysis_obj.case.analyses[0].uploaded_at}."
)
continue

case_id = analysis_obj.family.internal_id
case_id = analysis_obj.case.internal_id
LOG.info("Uploading analysis for case: %s", case_id)
try:
context.invoke(upload, case_id=case_id)
Expand Down
14 changes: 6 additions & 8 deletions cg/cli/upload/clinical_delivery.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,25 +87,23 @@ def auto_fastq(context: click.Context, dry_run: bool):
status_db: Store = context.obj.status_db
trailblazer_api: TrailblazerAPI = context.obj.trailblazer_api
for analysis_obj in status_db.get_analyses_to_upload(pipeline=Pipeline.FASTQ):
if analysis_obj.family.analyses[0].uploaded_at:
if analysis_obj.case.analyses[0].uploaded_at:
LOG.debug(
f"Newer analysis already uploaded for {analysis_obj.family.internal_id}, skipping"
f"Newer analysis already uploaded for {analysis_obj.case.internal_id}, skipping"
)
continue
if analysis_obj.upload_started_at:
if trailblazer_api.is_latest_analysis_completed(
case_id=analysis_obj.family.internal_id
):
if trailblazer_api.is_latest_analysis_completed(case_id=analysis_obj.case.internal_id):
LOG.info(
f"The upload for {analysis_obj.family.internal_id} is completed, setting uploaded at to {dt.datetime.now()}"
f"The upload for {analysis_obj.case.internal_id} is completed, setting uploaded at to {dt.datetime.now()}"
)
analysis_obj.uploaded_at = dt.datetime.now()
else:
LOG.debug(
f"Upload to clinical-delivery for {analysis_obj.family.internal_id} has already started, skipping"
f"Upload to clinical-delivery for {analysis_obj.case.internal_id} has already started, skipping"
)
continue
case: Case = analysis_obj.family
case: Case = analysis_obj.case
LOG.info(f"Uploading family: {case.internal_id}")
analysis_obj.upload_started_at = dt.datetime.now()
try:
Expand Down
2 changes: 1 addition & 1 deletion cg/cli/upload/nipt/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ def nipt_upload_all(context: click.Context, dry_run: bool):
return

for analysis in analyses:
internal_id = analysis.family.internal_id
internal_id = analysis.case.internal_id

if nipt_upload_api.flowcell_passed_qc_value(
case_id=internal_id, q30_threshold=Q30_THRESHOLD
Expand Down
2 changes: 1 addition & 1 deletion cg/cli/upload/nipt/ftp.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,5 +65,5 @@ def nipt_upload_all(context: click.Context, dry_run: bool):
return

for analysis in analyses:
case_id = analysis.family.internal_id
case_id = analysis.case.internal_id
context.invoke(nipt_upload_case, case_id=case_id, dry_run=dry_run)
2 changes: 1 addition & 1 deletion cg/cli/workflow/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ def past_run_dirs(
LOG.info(f"Cleaning {len(possible_cleanups)} analyses created before {before}")

for analysis in possible_cleanups:
case_id = analysis.family.internal_id
case_id = analysis.case.internal_id
try:
LOG.info("Cleaning %s output for %s", analysis_api.pipeline, case_id)
context.invoke(clean_run_dir, yes=yes, case_id=case_id, dry_run=dry_run)
Expand Down
2 changes: 1 addition & 1 deletion cg/meta/clean/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def get_bundle_files(self, before: datetime, pipeline: Pipeline) -> Iterator[lis
for analysis in self.status_db.get_analyses_for_pipeline_started_at_before(
pipeline=pipeline, started_at_before=before
):
bundle_name = analysis.family.internal_id
bundle_name = analysis.case.internal_id

hk_bundle_version: Optional[Version] = self.housekeeper_api.version(
bundle=bundle_name, date=analysis.started_at
Expand Down
4 changes: 1 addition & 3 deletions cg/meta/observations/observations_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,9 +51,7 @@ def get_observations_input_files(
"""Fetch input files from a case to upload to Loqusdb."""
analysis: Analysis = case.analyses[0]
analysis_date: datetime = analysis.started_at or analysis.completed_at
hk_version: Version = self.housekeeper_api.version(
analysis.family.internal_id, analysis_date
)
hk_version: Version = self.housekeeper_api.version(analysis.case.internal_id, analysis_date)
return self.extract_observations_files_from_hk(hk_version)

def get_loqusdb_api(self, loqusdb_instance: LoqusdbInstance) -> LoqusdbAPI:
Expand Down
6 changes: 3 additions & 3 deletions cg/meta/report/report_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ def get_cases_without_delivery_report(self, pipeline: Pipeline) -> list[Case]:
:MAX_ITEMS_TO_RETRIEVE
]
for analysis_obj in analyses:
case: Case = analysis_obj.family
case: Case = analysis_obj.case
last_version: Version = self.housekeeper_api.last_version(bundle=case.internal_id)
hk_file: File = self.housekeeper_api.get_files(
bundle=case.internal_id, version=last_version.id if last_version else None
Expand All @@ -134,7 +134,7 @@ def get_cases_without_uploaded_delivery_report(self, pipeline: Pipeline) -> list
analyses: Query = self.status_db.analyses_to_upload_delivery_reports(pipeline=pipeline)[
:MAX_ITEMS_TO_RETRIEVE
]
return [analysis_obj.family for analysis_obj in analyses]
return [analysis_obj.case for analysis_obj in analyses]

def update_delivery_report_date(self, case: Case, analysis_date: datetime) -> None:
"""Updates the date when delivery report was created."""
Expand Down Expand Up @@ -201,7 +201,7 @@ def get_report_version(analysis: Analysis) -> int:
"""
version = None
if analysis:
version = len(analysis.family.analyses) - analysis.family.analyses.index(analysis)
version = len(analysis.case.analyses) - analysis.case.analyses.index(analysis)
return version

def get_case_data(
Expand Down
6 changes: 3 additions & 3 deletions cg/meta/upload/coverage.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,9 @@ def __init__(self, status_api: Store, hk_api: HousekeeperAPI, chanjo_api: Chanjo

def data(self, analysis_obj: Analysis) -> dict:
"""Get data for uploading coverage."""
family_id = analysis_obj.family.internal_id
data = {"family": family_id, "family_name": analysis_obj.family.name, "samples": []}
for link_obj in analysis_obj.family.links:
family_id = analysis_obj.case.internal_id
data = {"family": family_id, "family_name": analysis_obj.case.name, "samples": []}
for link_obj in analysis_obj.case.links:
analysis_date = analysis_obj.started_at or analysis_obj.completed_at
hk_version = self.hk_api.version(family_id, analysis_date)
hk_coverage = self.hk_api.files(
Expand Down
2 changes: 1 addition & 1 deletion cg/meta/upload/fohm/fohm.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ def link_sample_rawdata_files(self) -> None:
"""Hardlink samples rawdata files to fohm delivery folder."""
for sample_id in self.aggregation_dataframe["internal_id"]:
sample: Sample = self.status_db.get_sample_by_internal_id(internal_id=sample_id)
bundle_name = sample.links[0].family.internal_id
bundle_name = sample.links[0].case.internal_id
version_obj: Version = self.housekeeper_api.last_version(bundle=bundle_name)
files = self.housekeeper_api.files(version=version_obj.id, tags=[sample_id]).all()
for file in files:
Expand Down
6 changes: 3 additions & 3 deletions cg/meta/upload/gt.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,16 +40,16 @@ def data(self, analysis_obj: Analysis) -> dict:
}
"""
case_id = analysis_obj.family.internal_id
case_id = analysis_obj.case.internal_id
LOG.info("Fetching upload genotype data for %s", case_id)
hk_version = self.hk.last_version(case_id)
hk_bcf = self.get_bcf_file(hk_version)
data = {"bcf": hk_bcf.full_path}
if analysis_obj.pipeline in [Pipeline.BALSAMIC, Pipeline.BALSAMIC_UMI]:
data["samples_sex"] = self._get_samples_sex_balsamic(case_obj=analysis_obj.family)
data["samples_sex"] = self._get_samples_sex_balsamic(case_obj=analysis_obj.case)
elif analysis_obj.pipeline == Pipeline.MIP_DNA:
data["samples_sex"] = self._get_samples_sex_mip(
case_obj=analysis_obj.family, hk_version=hk_version
case_obj=analysis_obj.case, hk_version=hk_version
)
else:
raise ValueError(f"Pipeline {analysis_obj.pipeline} does not support Genotype upload")
Expand Down
2 changes: 1 addition & 1 deletion cg/meta/upload/scout/balsamic_config_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,5 +93,5 @@ def build_load_config(self) -> None:
LOG.info("Building samples")
db_sample: FamilySample

for db_sample in self.analysis_obj.family.links:
for db_sample in self.analysis_obj.case.links:
self.load_config.samples.append(self.build_config_sample(case_sample=db_sample))
8 changes: 4 additions & 4 deletions cg/meta/upload/scout/mip_config_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def build_load_config(self, rank_score_threshold: int = 5) -> None:

self.add_common_info_to_load_config()
mip_analysis_data: MipAnalysis = self.mip_analysis_api.get_latest_metadata(
self.analysis_obj.family.internal_id
self.analysis_obj.case.internal_id
)
self.load_config.human_genome_build = (
"38" if "38" in mip_analysis_data.genome_build else "37"
Expand All @@ -59,7 +59,7 @@ def build_load_config(self, rank_score_threshold: int = 5) -> None:

self.load_config.gene_panels = (
self.mip_analysis_api.convert_panels(
self.analysis_obj.family.customer.internal_id, self.analysis_obj.family.panels
self.analysis_obj.case.customer.internal_id, self.analysis_obj.case.panels
)
or None
)
Expand All @@ -68,14 +68,14 @@ def build_load_config(self, rank_score_threshold: int = 5) -> None:

LOG.info("Building samples")
db_sample: FamilySample
for db_sample in self.analysis_obj.family.links:
for db_sample in self.analysis_obj.case.links:
self.load_config.samples.append(self.build_config_sample(case_sample=db_sample))
self.include_pedigree_picture()

def include_pedigree_picture(self) -> None:
if self.is_multi_sample_case(self.load_config):
if self.is_family_case(self.load_config):
svg_path: Path = self.run_madeline(self.analysis_obj.family)
svg_path: Path = self.run_madeline(self.analysis_obj.case)
self.load_config.madeline = str(svg_path)
else:
LOG.info("family of unconnected samples - skip pedigree graph")
Expand Down
2 changes: 1 addition & 1 deletion cg/meta/upload/scout/rnafusion_config_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def build_load_config(self) -> None:
LOG.info("Building samples")
db_sample: FamilySample

for db_sample in self.analysis_obj.family.links:
for db_sample in self.analysis_obj.case.links:
self.load_config.samples.append(self.build_config_sample(case_sample=db_sample))

def include_case_files(self) -> None:
Expand Down
16 changes: 8 additions & 8 deletions cg/meta/upload/scout/scout_config_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,11 +30,11 @@ def __init__(self, hk_version_obj: Version, analysis_obj: Analysis, lims_api: Li
def add_common_info_to_load_config(self) -> None:
"""Add the mandatory common information to a scout load config object"""
self.load_config.analysis_date = self.analysis_obj.completed_at
self.load_config.default_gene_panels = self.analysis_obj.family.panels
self.load_config.family = self.analysis_obj.family.internal_id
self.load_config.family_name = self.analysis_obj.family.name
self.load_config.owner = self.analysis_obj.family.customer.internal_id
self.load_config.synopsis = self.analysis_obj.family.synopsis
self.load_config.default_gene_panels = self.analysis_obj.case.panels
self.load_config.family = self.analysis_obj.case.internal_id
self.load_config.family_name = self.analysis_obj.case.name
self.load_config.owner = self.analysis_obj.case.customer.internal_id
self.load_config.synopsis = self.analysis_obj.case.synopsis
self.include_cohorts()
self.include_phenotype_groups()
self.include_phenotype_terms()
Expand Down Expand Up @@ -92,7 +92,7 @@ def include_phenotype_terms(self) -> None:
LOG.info("Adding phenotype terms to scout load config")
phenotype_terms: set[str] = set()
link_obj: FamilySample
for link_obj in self.analysis_obj.family.links:
for link_obj in self.analysis_obj.case.links:
sample_obj: Sample = link_obj.sample
for phenotype_term in sample_obj.phenotype_terms:
LOG.debug(
Expand All @@ -108,7 +108,7 @@ def include_phenotype_groups(self) -> None:
LOG.info("Adding phenotype groups to scout load config")
phenotype_groups: set[str] = set()
link_obj: FamilySample
for link_obj in self.analysis_obj.family.links:
for link_obj in self.analysis_obj.case.links:
sample_obj: Sample = link_obj.sample
for phenotype_group in sample_obj.phenotype_groups:
LOG.debug(
Expand All @@ -122,7 +122,7 @@ def include_phenotype_groups(self) -> None:

def include_cohorts(self) -> None:
LOG.info("Including cohorts to scout load config")
cohorts: list[str] = self.analysis_obj.family.cohorts
cohorts: list[str] = self.analysis_obj.case.cohorts
if cohorts:
LOG.debug("Adding cohorts %s", ", ".join(cohorts))
self.load_config.cohorts = cohorts
Expand Down
6 changes: 2 additions & 4 deletions cg/meta/upload/scout/uploadscoutapi.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def generate_config(self, analysis_obj: Analysis) -> ScoutLoadConfig:

# Fetch last version from housekeeper
# This should be safe since analyses are only added if data is analysed
hk_version_obj: Version = self.housekeeper.last_version(analysis_obj.family.internal_id)
hk_version_obj: Version = self.housekeeper.last_version(analysis_obj.case.internal_id)
LOG.debug("Found housekeeper version %s", hk_version_obj.id)

load_config: ScoutLoadConfig
Expand Down Expand Up @@ -426,9 +426,7 @@ def _dna_cases_related_to_dna_sample(
self, dna_sample: Sample, collaborators: set[Customer]
) -> list[str]:
"""Maps a list of uploaded DNA cases linked to DNA sample."""
potential_cases_related_to_dna_sample: list[Case] = [
dna_sample_family_relation.family for dna_sample_family_relation in dna_sample.links
]
potential_cases_related_to_dna_sample: list[Case] = [link.case for link in dna_sample.links]
return self.filter_cases_related_to_dna_sample(
list_of_dna_cases=potential_cases_related_to_dna_sample, collaborators=collaborators
)
Expand Down
2 changes: 1 addition & 1 deletion cg/meta/upload/upload_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def update_uploaded_at(self, analysis: Analysis) -> None:

self.status_db.session.commit()
self.trailblazer_api.set_analysis_uploaded(
case_id=analysis.family.internal_id, uploaded_at=analysis.uploaded_at
case_id=analysis.case.internal_id, uploaded_at=analysis.uploaded_at
)

@staticmethod
Expand Down
2 changes: 1 addition & 1 deletion cg/meta/workflow/analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ def upload_bundle_statusdb(self, case_id: str, dry_run: bool = False) -> None:
completed_at=dt.datetime.now(),
primary=(len(case_obj.analyses) == 0),
)
new_analysis.family = case_obj
new_analysis.case = case_obj
if dry_run:
LOG.info("Dry-run: StatusDB changes will not be commited")
return
Expand Down
4 changes: 2 additions & 2 deletions cg/meta/workflow/balsamic.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,7 @@ def get_concatenated_fastq_path(self, link_object: FamilySample) -> Path:
concatenated_fastq_name: str = self.fastq_handler.get_concatenated_name(linked_fastq_name)
return Path(
self.root_dir,
link_object.family.internal_id,
link_object.case.internal_id,
"fastq",
concatenated_fastq_name,
)
Expand Down Expand Up @@ -544,7 +544,7 @@ def resolve_target_bed(
return panel_bed
if self.get_application_type(link_object.sample) not in self.__BALSAMIC_BED_APPLICATIONS:
return None
return self.get_target_bed_from_lims(link_object.family.internal_id)
return self.get_target_bed_from_lims(link_object.case.internal_id)

def get_pipeline_version(self, case_id: str) -> str:
LOG.debug("Fetch pipeline version")
Expand Down
2 changes: 1 addition & 1 deletion cg/meta/workflow/microsalt.py
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@ def get_case_id_from_sample(self, unique_id: str) -> tuple[str, str]:
if not sample:
LOG.error("No sample found with id: %s", unique_id)
raise click.Abort
case_id = sample.links[0].family.internal_id
case_id = sample.links[0].case.internal_id
sample_id = sample.internal_id
return case_id, sample_id

Expand Down
2 changes: 1 addition & 1 deletion cg/meta/workflow/mip_dna.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ def config_sample(
sample_data["capture_kit"]: str = panel_bed or DEFAULT_CAPTURE_KIT
else:
sample_data["capture_kit"]: Optional[str] = panel_bed or self.get_target_bed_from_lims(
case_id=link_obj.family.internal_id
case_id=link_obj.case.internal_id
)
if link_obj.mother:
sample_data[Pedigree.MOTHER.value]: str = link_obj.mother.internal_id
Expand Down
Loading

0 comments on commit 0c31ec5

Please sign in to comment.