Skip to content

Commit

Permalink
Convert to f-strings (#2672) (patch)
Browse files Browse the repository at this point in the history
### Fixed

- Converted `%`-style logging calls to f-strings across parts of the repository.
  • Loading branch information
islean authored Nov 9, 2023
1 parent dc28515 commit 4757e3b
Show file tree
Hide file tree
Showing 48 changed files with 152 additions and 176 deletions.
12 changes: 6 additions & 6 deletions cg/apps/crunchy/crunchy.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def __init__(self, config: dict):
def set_dry_run(self, dry_run: bool) -> None:
"""Update dry run."""
LOG.info("Updating compress api")
LOG.info("Set dry run to %s", dry_run)
LOG.info(f"Set dry run to {dry_run}")
self.dry_run = dry_run
self.slurm_api.set_dry_run(dry_run=dry_run)

Expand All @@ -56,7 +56,7 @@ def set_dry_run(self, dry_run: bool) -> None:
def is_compression_pending(compression_obj: CompressionData) -> bool:
"""Check if compression/decompression has started but not finished."""
if compression_obj.pending_exists():
LOG.info("Compression/decompression is pending for %s", compression_obj.run_name)
LOG.info(f"Compression/decompression is pending for {compression_obj.run_name}")
return True
LOG.info("Compression/decompression is not running")
return False
Expand Down Expand Up @@ -99,7 +99,7 @@ def is_spring_decompression_possible(compression_obj: CompressionData) -> bool:
"""
if compression_obj.pending_exists():
LOG.info("Compression/decompression is pending for %s", compression_obj.run_name)
LOG.info(f"Compression/decompression is pending for {compression_obj.run_name}")
return False

if not compression_obj.spring_exists():
Expand Down Expand Up @@ -203,7 +203,7 @@ def is_spring_decompression_done(compression_obj: CompressionData) -> bool:
@staticmethod
def create_pending_file(pending_path: Path, dry_run: bool) -> None:
"""Create a pending flag file."""
LOG.info("Creating pending flag %s", pending_path)
LOG.info(f"Creating pending flag {pending_path}")
if dry_run:
return
pending_path.touch(exist_ok=False)
Expand Down Expand Up @@ -254,7 +254,7 @@ def fastq_to_spring(self, compression_obj: CompressionData, sample_id: str = "")
sbatch_number: int = self.slurm_api.submit_sbatch(
sbatch_content=sbatch_content, sbatch_path=sbatch_path
)
LOG.info("Fastq compression running as job %s", sbatch_number)
LOG.info(f"Fastq compression running as job {sbatch_number}")
return sbatch_number

def spring_to_fastq(self, compression_obj: CompressionData, sample_id: str = "") -> int:
Expand Down Expand Up @@ -307,7 +307,7 @@ def spring_to_fastq(self, compression_obj: CompressionData, sample_id: str = "")
sbatch_number: int = self.slurm_api.submit_sbatch(
sbatch_content=sbatch_content, sbatch_path=sbatch_path
)
LOG.info("Spring decompression running as job %s", sbatch_number)
LOG.info(f"Spring decompression running as job {sbatch_number}")
return sbatch_number

@staticmethod
Expand Down
2 changes: 1 addition & 1 deletion cg/apps/crunchy/files.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def get_tmp_dir(prefix: str, suffix: str, base: str = None) -> str:
with tempfile.TemporaryDirectory(prefix=prefix, suffix=suffix, dir=base) as dir_name:
tmp_dir_path = dir_name

LOG.info("Created temporary dir %s", tmp_dir_path)
LOG.info(f"Created temporary dir {tmp_dir_path}")
return tmp_dir_path


Expand Down
6 changes: 3 additions & 3 deletions cg/apps/gt.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def upload(self, bcf_path: str, samples_sex: dict, force: bool = False) -> None:
if force:
upload_parameters.append("--force")

LOG.info("loading VCF genotypes for sample(s): %s", ", ".join(samples_sex.keys()))
LOG.info(f"loading VCF genotypes for sample(s): {', '.join(samples_sex.keys())}")
self.process.run_command(parameters=upload_parameters, dry_run=self.dry_run)

for sample_id in samples_sex:
Expand All @@ -46,13 +46,13 @@ def upload(self, bcf_path: str, samples_sex: dict, force: bool = False) -> None:
def update_sample_sex(self, sample_id: str, sex: str) -> None:
"""Update the sex for a sample in the genotype tool"""
sample_sex_parameters = ["add-sex", sample_id, "-s", sex]
LOG.debug("Set sex for sample %s to %s", sample_id, sex)
LOG.debug(f"Set sex for sample {sample_id} to {sex}")
self.process.run_command(parameters=sample_sex_parameters, dry_run=self.dry_run)

def update_analysis_sex(self, sample_id: str, sex: str) -> None:
"""Update the predicted sex for a sample based on genotype analysis in the genotype tool"""
analysis_sex_parameters = ["add-sex", sample_id, "-a", "sequence", sex]
LOG.debug("Set predicted sex for sample %s to %s for the sequence analysis", sample_id, sex)
LOG.debug(f"Set predicted sex for sample {sample_id} to {sex} for the sequence analysis")
self.process.run_command(parameters=analysis_sex_parameters, dry_run=self.dry_run)

def export_sample(self, days: int = 0) -> str:
Expand Down
4 changes: 2 additions & 2 deletions cg/apps/housekeeper/hk.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def __init__(self, config: dict) -> None:
self.root_dir: str = config["housekeeper"]["root"]

def __getattr__(self, name):
LOG.warning("Called undefined %s on %s, please wrap", name, self.__class__.__name__)
LOG.warning(f"Called undefined {name} on {self.__class__.__name__}, please wrap")
return getattr(self._store, name)

def new_bundle(self, name: str, created_at: dt.datetime = None) -> Bundle:
Expand Down Expand Up @@ -216,7 +216,7 @@ def get_included_path(root_dir: Path, version_obj: Version, file_obj: File) -> P
"""
version_root_dir: Path = Path(root_dir, version_obj.relative_root_dir)
version_root_dir.mkdir(parents=True, exist_ok=True)
LOG.info("Created new bundle version dir: %s", version_root_dir)
LOG.info(f"Created new bundle version dir: {version_root_dir}")
return Path(version_root_dir, Path(file_obj.path).name)

def include_file(self, file_obj: File, version_obj: Version) -> File:
Expand Down
6 changes: 3 additions & 3 deletions cg/apps/lims/order.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def submit_project(self, project_name: str, samples: list[dict], researcher_id:
lims_project = Project.create(
self, researcher=Researcher(self, id=researcher_id), name=project_name
)
LOG.info("%s: created new LIMS project", lims_project.id)
LOG.info(f"{lims_project.id}: created new LIMS project")

containers_data = [
batch.build_container(
Expand All @@ -64,7 +64,7 @@ def submit_project(self, project_name: str, samples: list[dict], researcher_id:
for container in containers
]
container_details = batch.build_container_batch(containers_data)
LOG.debug("%s: saving containers", lims_project.name)
LOG.debug(f"{lims_project.name}: saving containers")
container_map = self.save_containers(container_details)

reagentlabel_samples = [
Expand All @@ -77,7 +77,7 @@ def submit_project(self, project_name: str, samples: list[dict], researcher_id:
samples_data = []
for container in containers:
for sample in container["samples"]:
LOG.debug("%s: adding sample to container: %s", sample["name"], container["name"])
LOG.debug(f"{sample['name']}: adding sample to container: {container['name']}")
lims_container = container_map[container["name"]]
sample_data = batch.build_sample(
name=sample["name"],
Expand Down
2 changes: 1 addition & 1 deletion cg/apps/madeline/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def run(self, family_id: str, samples: list[dict], out_path: str = None) -> path
out_path = output_dir / "madeline.xml"

output_prefix = str(out_path.with_suffix(""))
LOG.info("Generate madeline output to %s", out_path)
LOG.info(f"Generate madeline output to {out_path}")

ped_stream = self.make_ped(family_id, samples)

Expand Down
2 changes: 1 addition & 1 deletion cg/apps/mutacc_auto.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,5 +76,5 @@ def run_command(command: list):
completed_process = subprocess.run(args=command, check=False)
returncode = completed_process.returncode
if returncode != 0:
LOG.warning("process %s ended with exitcode %d", " ".join(command), returncode)
LOG.warning(f"process {' '.join(command)} ended with exitcode {returncode}")
raise subprocess.CalledProcessError(returncode=returncode, cmd=command)
10 changes: 5 additions & 5 deletions cg/apps/orderform/excel_orderform_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ class ExcelOrderformParser(OrderformParser):

def check_orderform_version(self, document_title: str) -> None:
"""Raise an error if the orderform is too new or too old for the order portal"""
LOG.info("Validating that %s is a correct orderform version", document_title)
LOG.info(f"Validating that {document_title} is a correct orderform version")
for valid_orderform in self.VALID_ORDERFORMS:
if valid_orderform in document_title:
return
Expand All @@ -45,7 +45,7 @@ def get_sheet_name(self, sheet_names: list[str]) -> str:

for name in sheet_names:
if name in self.SHEET_NAMES:
LOG.info("Found sheet name %s", name)
LOG.info(f"Found sheet name {name}")
return name
raise OrderFormError("'orderform' sheet not found in Excel file")

Expand All @@ -60,11 +60,11 @@ def get_document_title(workbook: Workbook, orderform_sheet: Worksheet) -> str:
continue
information_sheet: Worksheet = workbook[sheet_name]
document_title = information_sheet.cell(1, 3).value
LOG.info("Found document title %s", document_title)
LOG.info(f"Found document title {document_title}")
return document_title

document_title = orderform_sheet.cell(1, 2).value
LOG.info("Found document title %s", document_title)
LOG.info(f"Found document title {document_title}")
return document_title

@staticmethod
Expand Down Expand Up @@ -195,7 +195,7 @@ def get_customer_id(self) -> str:
def parse_orderform(self, excel_path: str) -> None:
"""Parse out information from an order form"""

LOG.info("Open excel workbook from file %s", excel_path)
LOG.info(f"Open excel workbook from file {excel_path}")
workbook: Workbook = openpyxl.load_workbook(
filename=excel_path, read_only=True, data_only=True
)
Expand Down
2 changes: 1 addition & 1 deletion cg/apps/orderform/orderform_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def group_cases(self) -> dict[str, list[OrderSample]]:
cases[case_id] = []
cases[case_id].append(sample)
if cases:
LOG.info("Found cases %s", ", ".join(cases.keys()))
LOG.info(f"Found cases {', '.join(cases.keys())}")
else:
LOG.info("Could not find any cases")
return cases
Expand Down
2 changes: 1 addition & 1 deletion cg/apps/osticket.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def open_ticket(
res = requests.post(self.url, json=data, headers=self.headers)
if res.ok:
return res.text
LOG.error("res.text: %s, reason: %s", res.text, res.reason)
LOG.error(f"res.text: {res.text}, reason: {res.reason}")
raise TicketCreationError(res)

@staticmethod
Expand Down
6 changes: 3 additions & 3 deletions cg/apps/slurm/slurm_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,14 +62,14 @@ def generate_sbatch_body(commands: str, error_function: Optional[str] = None) ->
@staticmethod
def write_sbatch_file(sbatch_content: str, sbatch_path: Path, dry_run: bool) -> None:
if dry_run:
LOG.info("Write sbatch content to path %s: \n%s", sbatch_path, sbatch_content)
LOG.info(f"Write sbatch content to path {sbatch_path}: \n{sbatch_content}")
return
LOG.debug("Write sbatch content %s to %s", sbatch_content, sbatch_path)
LOG.debug(f"Write sbatch content {sbatch_content} to {sbatch_path}")
with open(sbatch_path, mode="w+t") as sbatch_file:
sbatch_file.write(sbatch_content)

def submit_sbatch_job(self, sbatch_path: Path) -> int:
LOG.info("Submit sbatch %s", sbatch_path)
LOG.info(f"Submit sbatch {sbatch_path}")
sbatch_parameters: list[str] = [str(sbatch_path)]
self.process.run_command(parameters=sbatch_parameters, dry_run=self.dry_run)
if self.process.stderr:
Expand Down
2 changes: 1 addition & 1 deletion cg/apps/tb/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ def add_pending_analysis(
"ticket": ticket,
"workflow_manager": workflow_manager,
}
LOG.debug("Submitting job to Trailblazer: %s", request_body)
LOG.debug(f"Submitting job to Trailblazer: {request_body}")
response = self.query_trailblazer(command="add-pending-analysis", request_body=request_body)
if response:
return TrailblazerAnalysis.model_validate(response)
Expand Down
14 changes: 7 additions & 7 deletions cg/cli/add.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,10 @@
from cg.store.models import (
Application,
ApplicationVersion,
Collaboration,
Customer,
Case,
CaseSample,
Collaboration,
Customer,
Panel,
Sample,
User,
Expand Down Expand Up @@ -300,32 +300,32 @@ def link_sample_to_case(
father: Optional[Sample] = None
case_obj: Case = status_db.get_case_by_internal_id(internal_id=case_id)
if case_obj is None:
LOG.error("%s: family not found", case_id)
LOG.error(f"{case_id}: family not found")
raise click.Abort

sample: Sample = status_db.get_sample_by_internal_id(internal_id=sample_id)
if sample is None:
LOG.error("%s: sample not found", sample_id)
LOG.error(f"{sample_id}: sample not found")
raise click.Abort

if mother_id:
mother: Sample = status_db.get_sample_by_internal_id(internal_id=mother_id)
if mother is None:
LOG.error("%s: mother not found", mother_id)
LOG.error(f"{mother_id}: mother not found")
raise click.Abort

if father_id:
father: Sample = status_db.get_sample_by_internal_id(internal_id=father_id)
if father is None:
LOG.error("%s: father not found", father_id)
LOG.error(f"{father_id}: father not found")
raise click.Abort

new_record: CaseSample = status_db.relate_sample(
case=case_obj, sample=sample, status=status, mother=mother, father=father
)
status_db.session.add(new_record)
status_db.session.commit()
LOG.info("related %s to %s", case_obj.internal_id, sample.internal_id)
LOG.info(f"related {case_id} to {sample_id}")


@add.command("external")
Expand Down
9 changes: 4 additions & 5 deletions cg/cli/backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,13 +174,12 @@ def archive_spring_files(config: CGConfig, context: click.Context, dry_run: bool
tags=[SequencingFileTag.SPRING]
).filter(hk_models.File.path.like(f"%{config.environment}/{config.demultiplex.out_dir}%"))
for spring_file in spring_files:
LOG.info("Attempting encryption and PDC archiving for file %s", spring_file.path)
LOG.info(f"Attempting encryption and PDC archiving for file {spring_file.path}")
if Path(spring_file.path).exists():
context.invoke(archive_spring_file, spring_file_path=spring_file.path, dry_run=dry_run)
else:
LOG.warning(
"Spring file %s found in Housekeeper, but not on disk! Archiving process skipped!",
spring_file.path,
f"Spring file {spring_file.path} found in Housekeeper, but not on disk! Archiving process skipped!"
)


Expand Down Expand Up @@ -255,14 +254,14 @@ def _get_samples(status_api: Store, object_type: str, identifier: str) -> list[S
@click.pass_obj
def retrieve_spring_file(config: CGConfig, spring_file_path: str, dry_run: bool):
"""Retrieve a spring file from PDC"""
LOG.info("Attempting PDC retrieval and decryption file %s", spring_file_path)
LOG.info(f"Attempting PDC retrieval and decryption file {spring_file_path}")
housekeeper_api: HousekeeperAPI = config.housekeeper_api
pdc_api: PdcAPI = PdcAPI(binary_path=config.pdc.binary_path, dry_run=dry_run)
encryption_api: SpringEncryptionAPI = SpringEncryptionAPI(
binary_path=config.encryption.binary_path,
dry_run=dry_run,
)
LOG.debug("Start spring retrieval if not dry run mode=%s", dry_run)
LOG.debug(f"Start spring retrieval if not dry run mode={dry_run}")
spring_backup_api: SpringBackupAPI = SpringBackupAPI(
encryption_api=encryption_api,
hk_api=housekeeper_api,
Expand Down
6 changes: 2 additions & 4 deletions cg/cli/compress/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def get_fastq_individuals(store: Store, case_id: str = None) -> Iterator[str]:
"""Fetch individual ids from cases that are ready for SPRING compression"""
case_obj = store.get_case_by_internal_id(internal_id=case_id)
if not case_obj:
LOG.error("Could not find case %s", case_id)
LOG.error(f"Could not find case {case_id}")
raise CaseNotFoundError("")

for link_obj in case_obj.links:
Expand Down Expand Up @@ -201,9 +201,7 @@ def correct_spring_paths(
LOG.info("Could not find spring and/or spring metadata files, skipping")
continue
LOG.info(
"Moving existing spring file (and config) %s to hk bundle path %s",
true_spring_path,
spring_path,
f"Moving existing spring file (and config) {true_spring_path} to hk bundle path {spring_path}"
)
if not dry_run:
# We know from above that the spring path does not exist
Expand Down
2 changes: 1 addition & 1 deletion cg/cli/delete/case.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def _delete_sample(dry_run: bool, sample: Sample, status_db: Store, yes: bool):
return

if _is_sample_linked(sample):
LOG.info("Can NOT delete sample: %s", sample.internal_id)
LOG.info(f"Can NOT delete sample: {sample.internal_id}")
_log_sample_links(sample)
return

Expand Down
6 changes: 3 additions & 3 deletions cg/cli/deliver/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,13 +99,13 @@ def deliver_analysis(
if case_id:
case_obj: Case = status_db.get_case_by_internal_id(internal_id=case_id)
if not case_obj:
LOG.warning("Could not find case %s", case_id)
LOG.warning(f"Could not find case {case_id}")
return
cases.append(case_obj)
else:
cases: list[Case] = status_db.get_cases_by_ticket_id(ticket_id=ticket)
if not cases:
LOG.warning("Could not find cases for ticket %s", ticket)
LOG.warning(f"Could not find cases for ticket {ticket}")
return

for case_obj in cases:
Expand All @@ -123,7 +123,7 @@ def rsync(context: CGConfig, ticket: str, dry_run: bool):
tb_api: TrailblazerAPI = context.trailblazer_api
rsync_api: RsyncAPI = RsyncAPI(config=context)
slurm_id = rsync_api.run_rsync_on_slurm(ticket=ticket, dry_run=dry_run)
LOG.info("Rsync to the delivery server running as job %s", slurm_id)
LOG.info(f"Rsync to the delivery server running as job {slurm_id}")
rsync_api.add_to_trailblazer_api(
tb_api=tb_api, slurm_job_id=slurm_id, ticket=ticket, dry_run=dry_run
)
Expand Down
Loading

0 comments on commit 4757e3b

Please sign in to comment.