Merge pull request #61 from pvandyken/qc-app
Incorporate new qc-viewer app
akhanf authored Aug 10, 2023
2 parents 4dc4420 + 85d9651 commit e50a342
Showing 14 changed files with 258 additions and 91 deletions.
5 changes: 4 additions & 1 deletion .gitignore
@@ -14,4 +14,7 @@ test_out/

# Misc files
.*.swp
report.html
report.html

# Linters and tools
.ruff_cache
Binary file added snakedwi/resources/qc-app.tar.gz
9 changes: 3 additions & 6 deletions snakedwi/workflow/Snakefile
@@ -27,10 +27,7 @@ include: "rules/diffusion/sdc/synb0.smk"
include: "rules/diffusion/eddy.smk"
include: "rules/diffusion/reg_dwi_to_t1.smk"
include: "rules/diffusion/bedpost.smk"


localrules:
check_subj_dwi_metadata,
include: "rules/qc_app.smk"


# Currently unused
@@ -61,8 +58,8 @@ rule all:
input:
**get_eddy_quad_all(),
**get_bedpost_all(),
mask_qc=expand(rules.qc_b0_brainmask.output.png, zip, **subj_zip_list),
reg_qc=expand(rules.qc_reg_dwi_t1.output.png, zip, **subj_zip_list),
qc=rules.qc.output,
qc_app=rules.unpack_qc_app.output,
dtifit=expand(
bids(
root=root,
153 changes: 153 additions & 0 deletions snakedwi/workflow/lib/check_subj_dwi_metadata.py
@@ -0,0 +1,153 @@
#!/usr/bin/env python
import json
import sys
from typing import List, Tuple


def check_eddy_s2v(
eddy_s2v: bool,
slspec_txt: bool,
has_slice_timing: bool,
json_file: str,
) -> None:
"""Check if slice-to-volume correction can be performed"""
if eddy_s2v and not slspec_txt:
if not has_slice_timing:
print(
"Eddy slice-to-volume enabled, but 'SliceTiming' not found in "
f"{json_file}.\n"
"You must do one of the following:\n"
" 1. add the SliceTiming field to your dwi JSON files\n"
" 2. use the '--slspec_txt' option to provide a global slspec "
"file\n"
" 3. do not set the --use_eddy_s2v CLI option to disable "
"slice-to-volume correction"
)
sys.exit(1)


def check_pe_direction(json_dwi: dict, default_pe_dir: str, json_file: str) -> None:
"""Check if phase encoding direction is defined"""
if "PhaseEncodingDirection" not in json_dwi:
if not default_pe_dir == "":
print("WARNING: setting default PhaseEncodingDirection")
json_dwi["PhaseEncodingDirection"] = default_pe_dir
else:
if "PhaseEncodingAxis" in json_dwi:
print(
"WARNING: assuming PhaseEncodingDirection from " "PhaseEncodingAxis"
)
json_dwi["PhaseEncodingDirection"] = json_dwi["PhaseEncodingAxis"]
else:
print(
f"ERROR: PhaseEncodingDirection not found in {json_file}."
"\nYou must add the PhaseEncodingDirection field to your "
"dwi JSON files, or use the "
"--default_phase_encoding_direction CLI option"
)
sys.exit(1)


def check_echo_spacing(json_dwi: dict, default_echo_spacing: str) -> str:
"""Check if effective echo spacing is defined"""
if "EffectiveEchoSpacing" in json_dwi:
eff_echo = json_dwi["EffectiveEchoSpacing"]
elif "EstimatedEffectiveEchoSpacing" in json_dwi:
eff_echo = json_dwi["EstimatedEffectiveEchoSpacing"]
else:
print("EffectiveEchoSpacing not defined in JSON, using default value")
eff_echo = default_echo_spacing

return eff_echo


def get_pe_info(json_dwi: dict, pe_axes: List[str], pe_dirs: List[str]) -> None:
"""Extract phase encoding information"""
pe_axes.append(json_dwi["PhaseEncodingDirection"][0])

if json_dwi["PhaseEncodingDirection"][-1] == "-":
pe_dirs.append("-")
else:
pe_dirs.append("+")


def get_pe_indices(
pe_axes: List[str], pe_dirs: List[str], json_files: List[str]
) -> Tuple[List[int], List[str], List[str]]:
"""Extract indices to be used in processing"""
    # If multiple PE axes are present, prefer LR/RL if available, otherwise use AP
if len(set(pe_axes)) > 1:
lr_indices = [idx for idx, pe in enumerate(pe_axes) if pe == "i"]
ap_indices = [idx for idx, pe in enumerate(pe_axes) if pe == "j"]

        # Use LR/RL only if it contains opposite PE directions, otherwise fall back to AP
use_indices = (
lr_indices
if len(set([pe_dirs[idx] for idx in lr_indices])) == 2
else ap_indices
)
else:
use_indices = [idx for idx in range(len(json_files))]

# Select indices
pe_axes = [pe_axes[idx] for idx in use_indices]
pe_dirs = [pe_dirs[idx] for idx in use_indices]

return use_indices, pe_axes, pe_dirs


def check_subj_dwi_metadata(
json_files: List[str],
smk_config: dict,
) -> dict:
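    """Validate dwi JSON metadata and derive per-subject workflow settings."""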
bool_map = {True: "yes", False: "no"}
pe_axes, pe_dirs = [], []
eddy_s2v = smk_config["use_eddy_s2v"]
slspec_txt = smk_config["slspec_txt"]

for json_file in json_files:
with open(json_file, "r") as fname:
json_dwi = json.load(fname)

        has_slice_timing = "SliceTiming" in json_dwi

# Slice-to-volume
check_eddy_s2v(
eddy_s2v=eddy_s2v,
slspec_txt=slspec_txt,
has_slice_timing=has_slice_timing,
json_file=json_file,
)
# Phase encoding direction
check_pe_direction(
json_dwi=json_dwi,
default_pe_dir=smk_config["default_phase_encoding_direction"],
json_file=json_file,
)
# Effective echo spacing (not used)
eff_echo = check_echo_spacing( # noqa: F841
json_dwi=json_dwi,
default_echo_spacing=smk_config["default_effective_echo_spacing"],
)
# Save PE information
get_pe_info(json_dwi=json_dwi, pe_axes=pe_axes, pe_dirs=pe_dirs)

# Get PE indices
use_indices, pe_axes, pe_dirs = get_pe_indices(
pe_axes=pe_axes, pe_dirs=pe_dirs, json_files=json_files
)

    # Assemble per-subject settings
settings = {
"indices": use_indices,
"eddys2v": bool_map[eddy_s2v],
"PEaxis": pe_axes[0],
}
if smk_config["sdc_method"] == "optimal":
settings["sdc"] = (
"topup" if len(set(pe_dirs)) >= 2 else smk_config["sdc_method_alternate"]
)
else:
settings["sdc"] = smk_config["sdc_method"]

return settings
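For orientation, here is a minimal sketch of how the new helper might be invoked at workflow parse time (setup.smk imports it from the workflow's lib/ directory). The sidecar paths and config values are illustrative, not taken from the repository, and the commented return value assumes the two sidecars carry opposite PhaseEncodingDirection values ("j" and "j-"):

from lib.check_subj_dwi_metadata import check_subj_dwi_metadata

# Hypothetical inputs -- paths and values are for illustration only.
json_files = [
    "sub-001/dwi/sub-001_dir-AP_dwi.json",
    "sub-001/dwi/sub-001_dir-PA_dwi.json",
]
smk_config = {
    "use_eddy_s2v": False,
    "slspec_txt": False,
    "default_phase_encoding_direction": "",
    "default_effective_echo_spacing": 0.0001,
    "sdc_method": "optimal",
    "sdc_method_alternate": "synthsr",
}

settings = check_subj_dwi_metadata(json_files, smk_config)
# -> {"indices": [0, 1], "eddys2v": "no", "PEaxis": "j", "sdc": "topup"}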
5 changes: 1 addition & 4 deletions snakedwi/workflow/rules/diffusion/method_grabber.smk
@@ -53,10 +53,7 @@ def get_dwi_ref(wildcards):
**subj_wildcards,
)
else:
checkpoint_output = checkpoints.check_subj_dwi_metadata.get(**wildcards).output[
0
]
([method],) = glob_wildcards(os.path.join(checkpoint_output, "sdc-{method}"))
method = get_sdc_method(wildcards)

return sdc_methods[method]

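The checkpoint-based glob is replaced here by a call to get_sdc_method(wildcards), which is defined elsewhere in the workflow and not shown in this diff. Purely to illustrate the intent, it could be backed by the new metadata helper along these lines; get_dwi_json_files is a hypothetical stand-in for however the subject's dwi JSON sidecars are gathered:

def get_sdc_method(wildcards):
    # Hypothetical sketch only -- the real implementation may differ.
    json_files = get_dwi_json_files(wildcards)  # e.g. one sidecar per PE direction
    settings = check_subj_dwi_metadata(json_files, config)
    return settings["sdc"]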
16 changes: 16 additions & 0 deletions snakedwi/workflow/rules/diffusion/prepdwi.smk
@@ -458,3 +458,19 @@ rule qc_b0_brainmask:
config["singularity"]["python"]
script:
"../../scripts/qc/vis_qc_dseg.py"


rule compile_qc_b0_brainmask_manifest:
input:
mask_qc=expand(rules.qc_b0_brainmask.output.png, zip, **subj_zip_list),
output:
os.path.join(qc, "data", "mask.json"),
run:
with open(output[0], "w") as f:
json.dump(
{
"title": "DWI Brainmask",
"images": sorted(input["mask_qc"]),
},
f,
)
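After a run, the manifest written above can be sanity-checked with a few lines of Python. The qc/ prefix below is illustrative; in the workflow it resolves to os.path.join(config["root"], "qc"):

import json
from pathlib import Path

# Read back the manifest written by compile_qc_b0_brainmask_manifest.
manifest = json.loads(Path("qc/data/mask.json").read_text())
assert manifest["title"] == "DWI Brainmask"
print(f"{len(manifest['images'])} brainmask QC images listed")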
16 changes: 16 additions & 0 deletions snakedwi/workflow/rules/diffusion/reg_dwi_to_t1.smk
@@ -175,6 +175,22 @@ rule qc_reg_dwi_t1:
"../../scripts/qc/vis_regqc.py"


rule compile_qc_reg_dwi_t1_manifest:
input:
reg_qc=expand(rules.qc_reg_dwi_t1.output.png, zip, **subj_zip_list),
output:
os.path.join(qc, "data", "reg.json"),
run:
with open(output[0], "w") as f:
json.dump(
{
"title": "T1w Registration",
"images": sorted(input["reg_qc"]),
},
f,
)


rule convert_xfm_ras2itk:
input:
xfm_ras=rules.reg_dwi_to_t1.output.xfm_ras,
1 change: 0 additions & 1 deletion snakedwi/workflow/rules/diffusion/sdc/synthsr.smk
@@ -97,7 +97,6 @@ def get_restrict_deformation(wildcards, input, output):

rule reg_b0_to_t1_synthsr:
input:
rules.check_subj_dwi_metadata.output,
t1synth=rules.rigid_reg_t1_to_b0_synthsr.output.warped_subj,
b0synth=rules.reslice_synthSR_b0.output.synthsr,
params:
3 changes: 0 additions & 3 deletions snakedwi/workflow/rules/diffusion/sdc/topup.smk
@@ -80,9 +80,6 @@ rule apply_topup_jac:
phenc_concat=rules.concat_phase_encode_txt.output.phenc_concat,
topup_fieldcoef=rules.run_topup.output.topup_fieldcoef,
topup_movpar=rules.run_topup.output.topup_movpar,
workflowopts=bids(
root=root, datatype="dwi", suffix="workflowopts", **subj_wildcards
),
params:
inindex=get_applytopup_inindex,
topup_prefix=bids(
5 changes: 1 addition & 4 deletions snakedwi/workflow/rules/gradcorrect.smk
@@ -43,10 +43,7 @@ sdc_methods = {

def get_dwi_ref_for_gradcorrect(wildcards):
if config["gradcorrect_coeffs"]:
checkpoint_output = checkpoints.check_subj_dwi_metadata.get(**wildcards).output[
0
]
([method],) = glob_wildcards(os.path.join(checkpoint_output, "sdc-{method}"))
method = get_sdc_method(wildcards)

return sdc_methods[method]
else:
38 changes: 38 additions & 0 deletions snakedwi/workflow/rules/qc_app.smk
@@ -0,0 +1,38 @@
rule qc:
input:
mask_qc=rules.compile_qc_b0_brainmask_manifest.output[0],
reg_qc=rules.compile_qc_reg_dwi_t1_manifest.output[0],
output:
os.path.join(qc, "data.json"),
run:
with open(output[0], "w") as f:
json.dump(
{
"mask": json.loads(Path(input["mask_qc"]).read_text()),
"reg": json.loads(Path(input["reg_qc"]).read_text()),
},
f,
)


_qc_app = os.path.join(workflow.basedir, "..", "resources", "qc-app.tar.gz")


def _get_tar_contents(file):
try:
return [
p
            for p in sp.check_output(["tar", "-tf", file]).decode().splitlines()
if p[-1] != "/"
]
except sp.CalledProcessError as err:
raise Exception("Unable to find qc-app.tar.gz...") from err


rule unpack_qc_app:
input:
os.path.join(workflow.basedir, "..", "resources", "qc-app.tar.gz"),
output:
_get_tar_contents(_qc_app),
shell:
"tar -xvzf {input}"
5 changes: 5 additions & 0 deletions snakedwi/workflow/rules/setup.smk
@@ -1,12 +1,16 @@
from functools import lru_cache
import json
from pathlib import Path
from snakebids import (
bids,
generate_inputs,
filter_list,
get_wildcard_constraints,
)
import subprocess as sp

# from snakeboost import PipEnv
from lib.check_subj_dwi_metadata import check_subj_dwi_metadata


# writes inputs_config.yml and updates config dict
@@ -39,6 +43,7 @@ input_path = inputs.input_path

root = os.path.join(config["root"], "snakedwi")
work = os.path.join(config["root"], "work")
qc = os.path.join(config["root"], "qc")
# setup pipenvs - all my python rules use the script: directive, so will be some work to use snakeboost for this..
# dwi_env = PipEnv(
# root=Path('work'),