diff --git a/docs/README.md b/docs/README.md index 2c2cd0c..3294a87 100644 --- a/docs/README.md +++ b/docs/README.md @@ -99,16 +99,26 @@ Finally, see [here](#running-luigi-pnlpipe) for instructions about running the p ## luigi package -A *client* (PNL external collaborator) can use the official luigi package installed in `pnlpipe3` conda environment. -However, a *server* should install Tashrif's development for `/history/by_task_id/` URL to function: +Tashrif's development of the `/history/by_task_id/` feature has been merged into the official `luigi` package. +Until a release is available, you should install it as: - pip install git+https://github.com/tashrifbillah/luigi.git@89c9aa750de8ae2badabe435d98c02e44a1aa8b4 + pip install git+https://github.com/spotify/luigi.git@172128c3de7a41411a10e61e3c675b76595793e2 -*luigi-pnlpipe* itself will not fail without Tashrif's development on the *server* side. That means, you can -also use the official luigi package on the *server* side. But you will not be able to redirect to -`/history/by_task_id/` URLs generated in `*.log.html` provenance files. Notably, the provenance files -are generated on the *client* side by [_provenance.py](https://github.com/pnlbwh/luigi-pnlpipe/blob/afa6c8a86d481d8fe5d04ba1ceb533b5da740c32/workflows/_provenance.py) when *luigi-pipeline* is run. +However, the `luigi` history feature does not work with the latest `sqlalchemy`. Install an older version as: + pip install sqlalchemy==1.4.54 + +Reference: https://github.com/spotify/luigi/issues/3320 + +If you are an outside collaborator, you can launch a Luigi server as: + + luigi-pnlpipe/start_server.sh + +And subsequently, uncomment this line in `luigi-pnlpipe/luigi.cfg` to use it: + + default-scheduler-url = http://localhost:8082/ + +Psychiatry Neuroimaging Laboratory staff already have a server readily available. They do not need to launch one. 
## CNN-Diffusion-MRIBrain-Segmentation diff --git a/luigi.cfg b/luigi.cfg index d90fa80..3ac85da 100644 --- a/luigi.cfg +++ b/luigi.cfg @@ -1,10 +1,13 @@ [core] +# uncomment one of the Luigi servers + # for individual machine # default-scheduler-url = http://localhost:8082/ -# for High Performance Cluster (lsf script, bsub command) -# default-scheduler-url = http://cmu166.research.partners.org:8082/ +# PNL hosted private server +# default-scheduler-url = http://${LUIGI_USERNAME}:${LUIGI_PASSWORD}@pnl-elite-1.partners.org/ # PNL hosted public server -default-scheduler-url = https://${LUIGI_USERNAME}:${LUIGI_PASSWORD}@pnlservers.bwh.harvard.edu/luigi/ +# default-scheduler-url = https://${LUIGI_USERNAME}:${LUIGI_PASSWORD}@pnlservers.bwh.harvard.edu/luigi/ + log_level = INFO [scheduler] diff --git a/luigid.service b/luigid.service new file mode 100644 index 0000000..c3a10c0 --- /dev/null +++ b/luigid.service @@ -0,0 +1,17 @@ +[Unit] +Description=PNL Luigi server at http://pnl-elite-1.partners.org/ +After=network-online.target nginx.service +Wants=network-online.target nginx.service + +[Service] +User=sf284 +Type=oneshot +RemainAfterExit=yes +ExecStart=/opt/start_server.sh +ExecStop=pkill luigid +KillMode=process +TimeoutSec=60 + +[Install] +WantedBy=multi-user.target + diff --git a/params/cte/dwi_pipe_params.cfg b/params/cte/dwi_pipe_params.cfg index 49e10ee..49a2707 100644 --- a/params/cte/dwi_pipe_params.cfg +++ b/params/cte/dwi_pipe_params.cfg @@ -21,7 +21,7 @@ filter: acqp: /data/pnl/DIAGNOSE_CTE_U01/acqp.txt index: /data/pnl/DIAGNOSE_CTE_U01/index.txt config: /data/pnl/DIAGNOSE_CTE_U01/eddy_config.txt -useGpu: False +useGpu: True FslOutDir: fsl_eddy @@ -59,6 +59,8 @@ wma_cleanup: 0 [BseMask] +[GibbsUn] + [CnnMask] [PnlEddy] diff --git a/params/dwi_pipe_params.cfg b/params/dwi_pipe_params.cfg index 5086980..779568e 100644 --- a/params/dwi_pipe_params.cfg +++ b/params/dwi_pipe_params.cfg @@ -78,8 +78,12 @@ wma_cleanup: 0 [BseMask] +[GibbsUn] + [CnnMask] +[SynB0] + 
[PnlEddy] [FslEddy] diff --git a/params/hcp/dwi_pipe_params.cfg b/params/hcp/dwi_pipe_params.cfg index 075813e..3516259 100644 --- a/params/hcp/dwi_pipe_params.cfg +++ b/params/hcp/dwi_pipe_params.cfg @@ -34,22 +34,22 @@ HcpOutDir: hcppipe ## [GibbsUn] ## -unring_nproc: 4 +unring_nproc: 8 ## [Ukf] ## -ukf_params: +ukf_params: --seedsPerVoxel,1 eddy_epi_task: HcpPipe bhigh: 2000 ## [WMA800] ## -slicer_exec: /data/pnl/soft/pnlpipe3/Slicer-4.10.2-linux-amd64/SlicerWithExtensions.sh -FiberTractMeasurements: /data/pnl/soft/pnlpipe3/Slicer-4.10.2-linux-amd64/SlicerWithExtensions.sh --launch FiberTractMeasurements -atlas: /data/pnl/soft/pnlpipe3/ORG-Atlases-1.2 +slicer_exec: /software/rocky9/Slicer-5.6.1-linux-amd64/Slicer +FiberTractMeasurements: /software/rocky9/Slicer-5.6.1-linux-amd64/slicer.org/Extensions-32438/SlicerDMRI/lib/Slicer-5.6/cli-modules/FiberTractMeasurements +atlas: /software/rocky9/ORG-Atlases-1.2 wma_nproc: 4 xvfb: 1 -wma_cleanup: 0 +wma_cleanup: 2 [StructMask] @@ -58,6 +58,8 @@ wma_cleanup: 0 [BseMask] +[GibbsUn] + [CnnMask] [PnlEddy] diff --git a/params/synb0/acqparams.txt b/params/synb0/acqparams.txt new file mode 100644 index 0000000..714c52a --- /dev/null +++ b/params/synb0/acqparams.txt @@ -0,0 +1,2 @@ +0 1 0 0.05 +0 1 0 0 diff --git a/params/synb0/dwi_pipe_params.cfg b/params/synb0/dwi_pipe_params.cfg new file mode 100644 index 0000000..6231d30 --- /dev/null +++ b/params/synb0/dwi_pipe_params.cfg @@ -0,0 +1,75 @@ +[DEFAULT] + +## [StructMask] ## +mask_method: hd-bet +mask_qc: False + + +## [StructMask] [PnlEddy] [EddyEpi] ## +debug: False + + +## [CnnMask] ## +model_folder: /software/rocky9/CNN-Diffusion-MRIBrain-Segmentation/model_folder +percentile: 97 +filter: + + +## [FslEddy] [TopupEddy] ## +acqp: /software/rocky9/luigi-tutorial/edcrp/acqparams.txt +index: /software/rocky9/luigi-tutorial/edcrp/index.txt +config: /software/rocky9/pnlNipype/scripts/eddy_config.txt +useGpu: False +FslOutDir: fsl_eddy + + +## [EddyEpi] ## +epi_nproc: 8 
+eddy_task: FslEddy + + +## [HcpPipe] ## +HcpOutDir: hcppipe + + +## [GibbsUn] ## +unring_nproc: 4 + + +## [Ukf] ## +ukf_params: --numThreads,8 +eddy_epi_task: HcpPipe +bhigh: 2000 + + +## [WMA800] ## +slicer_exec: /software/rocky9/Slicer-5.6.1-linux-amd64/Slicer +FiberTractMeasurements: /software/rocky9/Slicer-5.6.1-linux-amd64/slicer.org/Extensions-32438/SlicerDMRI/lib/Slicer-5.6/cli-modules/FiberTractMeasurements +atlas: /data/pnl/soft/pnlpipe3/ORG-Atlases-1.2 +wma_nproc: 4 +xvfb: 1 +wma_cleanup: + +[StructMask] + +[BseExtract] + +[CnnMask] + +[GibbsUn] + +[SynB0] + +[PnlEddy] + +[FslEddy] + +[TopupEddy] + +[EddyEpi] + +[HcpPipe] + +[Ukf] + +[Wma800] diff --git a/start_server.sh b/start_server.sh new file mode 100755 index 0000000..94ac4cf --- /dev/null +++ b/start_server.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +cd /opt/ +miniconda3/bin/luigid --background --logdir luigi-server diff --git a/workflows/ExecuteTask.py b/workflows/ExecuteTask.py index cc546fe..8eb28c4 100755 --- a/workflows/ExecuteTask.py +++ b/workflows/ExecuteTask.py @@ -2,7 +2,7 @@ import argparse from conversion import read_cases -from luigi import build, configuration +from luigi import build, configuration, parameter from _define_outputs import IO from struct_pipe import StructMask, Freesurfer from dwi_pipe import DwiAlign, GibbsUn, CnnMask, \ @@ -13,6 +13,8 @@ from tempfile import gettempdir from glob import glob +import warnings +warnings.filterwarnings(action='ignore', category=parameter.UnconsumedParameterWarning) def _rm_tempfiles(names): diff --git a/workflows/_deps_tree.py b/workflows/_deps_tree.py index 056668c..2be7ccc 100644 --- a/workflows/_deps_tree.py +++ b/workflows/_deps_tree.py @@ -61,7 +61,7 @@ def get_record_id(task_id): # useful debug commands # full path necessary after .open # sqlite3 - # sqlite> .open /home/tb571/luigi-task-hist.db + # sqlite> .open /path/to/luigi-task-hist.db # sqlite> SELECT * FROM tasks; # sqlite> .quit cur = conn.cursor() diff --git 
a/workflows/_provenance.py b/workflows/_provenance.py index 9b1b726..98c20ee 100644 --- a/workflows/_provenance.py +++ b/workflows/_provenance.py @@ -1,6 +1,6 @@ from _deps_tree import print_tree, print_history_tree from os.path import join as pjoin, dirname, isfile -from os import getpid, environ +from os import getpid from subprocess import check_call, check_output from tempfile import gettempdir @@ -23,7 +23,7 @@ def _get_env(): # read hashes with open(hash_file) as f: - content= f.read().split() + content= f.read().strip().split('\n') # save hashes in a dictionary for integrating with json provenance hash_dict={} @@ -34,7 +34,7 @@ def _get_env(): # export conda env env_file= pjoin(gettempdir(), f'env-{getpid()}.yml') if not isfile(env_file): - check_output(f"{environ['CONDA_EXE']} env export > {env_file}", shell=True) + check_output(f"conda env export > {env_file}", shell=True) with open(env_file) as f: hash_dict['conda_env']= f.read() @@ -66,7 +66,7 @@ def write_provenance(obj, output=None): logfile= output.dirname.join(output.stem)+'.log.html' with open(logfile,'w') as f: - template= template.replace('{{output}}',output.basename) + template= template.replace('{{output}}',output.name) template= template.replace('{{textHistory}}',tree) template= template.replace('{{htmlHistory}}',history_tree) f.write(template) diff --git a/workflows/_synb0_eddy.sh b/workflows/_synb0_eddy.sh index edd4e93..ef0fc05 100755 --- a/workflows/_synb0_eddy.sh +++ b/workflows/_synb0_eddy.sh @@ -32,7 +32,7 @@ if [ ! 
-f OUTPUTS/b0_all_topup.nii.gz ] then TMPDIR=$TMPDIR \ singularity run -e -B INPUTS/:/INPUTS -B OUTPUTS/:/OUTPUTS \ - -B ${NEW_SOFT_DIR}/fs7.1.0/license.txt:/extra/freesurfer/license.txt \ + -B ${FREESURFER_HOME}/license.txt:/extra/freesurfer/license.txt \ ${NEW_SOFT_DIR}/containers/synb0-disco_v3.0.sif --stripped fi diff --git a/workflows/dwi_pipe.py b/workflows/dwi_pipe.py index 6b5ec25..1df8107 100755 --- a/workflows/dwi_pipe.py +++ b/workflows/dwi_pipe.py @@ -5,7 +5,7 @@ from glob import glob from os.path import join as pjoin, abspath, isfile, basename, dirname, isdir from os import symlink, getenv -from shutil import move, rmtree +from shutil import move, rmtree, copyfile from plumbum import local from subprocess import Popen, check_call @@ -102,14 +102,14 @@ class CnnMask(Task): def run(self): with TemporaryDirectory() as tmpdir, local.cwd(tmpdir): - symlink(self.input()['dwi'],self.input()['dwi'].basename) - symlink(self.input()['bval'],self.input()['bval'].basename) - symlink(self.input()['bvec'],self.input()['bvec'].basename) + copyfile(self.input()['dwi'],self.input()['dwi'].name) + copyfile(self.input()['bval'],self.input()['bval'].name) + copyfile(self.input()['bvec'],self.input()['bvec'].name) dwi_list= 'dwi_list.txt' with open(dwi_list,'w') as f: - f.write(pjoin(tmpdir,self.input()['dwi'].basename)) + f.write(pjoin(tmpdir,self.input()['dwi'].name)) cmd = (' ').join(['dwi_masking.py', @@ -292,8 +292,7 @@ def run(self): p = Popen(cmd, shell=True) p.wait() - version_file= outDir.join('fsl_version.txt') - check_call(f'eddy_openmp 2>&1 | grep Part > {version_file}', shell= True) + check_call('cp $FSLDIR/etc/fslversion {}'.format(self.output()['dwi'].dirname), shell= True) # fsl_eddy.py writes with this outPrefix outPrefix= outDir.join(self.input()[0]['dwi'].stem)+'_Ed' @@ -333,6 +332,7 @@ class SynB0(Task): def run(self): + # synb0 wrapper DIR= abspath(dirname(__file__)) cmd = (' ').join([f'{DIR}/_synb0_eddy.sh', @@ -347,9 +347,7 @@ def run(self): p = 
Popen(cmd, shell=True) p.wait() - version_file= self.output()['dwi'].dirname.join('fsl_version.txt') - check_call(f'eddy_openmp 2>&1 | grep Part > {version_file}', shell= True) - + check_call('cp $FSLDIR/etc/fslversion {}'.format(self.output()['dwi'].dirname), shell= True) write_provenance(self, self.output()['dwi']) @@ -519,8 +517,7 @@ def run(self): p = Popen(cmd, shell=True) p.wait() - version_file = outDir.join('fsl_version.txt') - check_call(f'eddy_openmp 2>&1 | grep Part > {version_file}', shell=True) + check_call('cp $FSLDIR/etc/fslversion {}'.format(self.output()['dwi'].dirname), shell=True) with open(outDir.join('.outPrefix.txt')) as f: @@ -572,10 +569,27 @@ def output(self): @inherits(SelectDwiFiles, DwiAlign) -class HcpPipe(ExternalTask): +class HcpPipe(Task): HcpOutDir= Parameter(default='hcppipe') + def run(self): + + if not isfile(self.output()['dwi']): + move(self.dwiHcp, self.output()['dwi']) + move(self.bvalHcp, self.output()['bval']) + move(self.bvecHcp, self.output()['bvec']) + move(self.maskHcp, self.output()['mask']) + move(self.bseHcp, self.output()['bse']) + + # create a placeholder so that future HCP pipe attempt can skip rerun + with open(self.dwiHcp,'w') as f: + f.write('') + + + check_call('cp $FSLDIR/etc/fslversion {}'.format(self.output()['dwi'].dirname), shell=True) + + def output(self): # read one dwi to learn name and containing directory @@ -590,26 +604,15 @@ def output(self): raise NotADirectoryError(f'{hcpEddyDir} does not exist. 
Provide HCP pipe output directory ' 'via HcpOutDir parameter in {getenv("LUIGI_CONFIG_PATH")}') + # construct HCP pipe outputs - ''' - Observe the following output files in ${StudyFolder}/${Subject}: - dwi: Diffusion/eddy/eddy_unwarped_images.nii.gz - bvals: Diffusion/eddy/Pos_Neg.bvals - bvecs: Diffusion/eddy/eddy_unwarped_images.eddy_rotated_bvecs - mask: Diffusion/eddy/nodif_brain_mask.nii.gz - bse: Diffusion/topup/hifib0.nii.gz - ''' - dwiHcp= f'{hcpOutDir}/Diffusion/eddy/eddy_unwarped_images.nii.gz' - bvalHcp= f'{hcpOutDir}/Diffusion/eddy/Pos_Neg.bvals' - bvecHcp= f'{hcpOutDir}/Diffusion/eddy/eddy_unwarped_images.eddy_rotated_bvecs' - maskHcp= f'{hcpOutDir}/Diffusion/eddy/nodif_brain_mask.nii.gz' - bseHcp= f'{hcpOutDir}/Diffusion/topup/hifib0.nii.gz' + self.dwiHcp= f'{hcpOutDir}/Diffusion/eddy/eddy_unwarped_images.nii.gz' + self.bvalHcp= f'{hcpOutDir}/Diffusion/eddy/Pos_Neg.bvals' + self.bvecHcp= f'{hcpOutDir}/Diffusion/eddy/eddy_unwarped_images.eddy_rotated_bvecs' + self.maskHcp= f'{hcpOutDir}/Diffusion/eddy/nodif_brain_mask.nii.gz' + self.bseHcp= f'{hcpOutDir}/Diffusion/topup/hifib0.nii.gz' - # determine luigi-pnlpipe outputs - # in https://github.com/pnlbwh/luigi-pnlpipe/commit/fc3a1a5319d027e3dad9e6afb393e7399a3d3c62 - # lines 549-581 nearly replicates lines 480-505 of TopupEddy task - # remove _acq-* eddy_epi_prefix= dwiRaw.rsplit('_dwi.nii.gz')[0] eddy_epi_prefix= eddy_epi_prefix.replace('_acq-PA','') @@ -618,7 +621,8 @@ def output(self): # find dir field if '_dir-' in dwiRaw: - dir= load_nifti(dwiHcp).shape[3] + with open(pjoin(hcpEddyDir,'index.txt')) as f: + dir= len(f.read().split()) eddy_epi_prefix= local.path(re.sub('_dir-(.+?)_', f'_dir-{dir}_', eddy_epi_prefix)) dwi = local.path(eddy_epi_prefix+ '_dwi.nii.gz') @@ -646,14 +650,6 @@ def output(self): bse= local.path(bse_prefix.split('_desc-')[0]+ '_desc-'+ desc+ '_bse.nii.gz') - # create symlinks - if not isfile(dwi): - symlink(dwiHcp, dwi) - symlink(bvalHcp, bval) - symlink(bvecHcp, bvec) - 
symlink(maskHcp, mask) - symlink(bseHcp, bse) - return dict(dwi=dwi, bval=bval, bvec=bvec, bse=bse, mask=mask) @@ -736,7 +732,7 @@ def run(self): write_provenance(self, outDir) def output(self): - prefix= self.input().dirname.join('wma800',self.input().basename.split('.vtk')[0], + prefix= self.input().dirname.join('wma800',self.input().name.split('.vtk')[0], 'FiberClustering/SeparatedClusters') clusters=[] diff --git a/workflows/getenv.sh b/workflows/getenv.sh index 2ef5c0e..4400479 100755 --- a/workflows/getenv.sh +++ b/workflows/getenv.sh @@ -14,27 +14,23 @@ echo luigi-pnlpipe,`$cmd` > $log_file cd ../pnlNipype echo pnlNipype,`$cmd` >> $log_file -# pnlpipe hash -cd ../pnlpipe -echo pnlpipe,`$cmd` >> $log_file - -# ANTs, UKFTractography, dcm2niix, tract_querier hashes -cd pnlpipe_software -for s in ANTs UKFTractography dcm2niix tract_querier -do - hash_line=`grep "DEFAULT_HASH = " $s.py` - IFS=" = ", read -ra tmp <<< $hash_line - hash=`echo ${tmp[1]} | sed "s/'//g"` - echo $s,$hash >> $log_file -done - +# ANTs, UKFTractography, dcm2niix hashes +antsRegistration --version | head -n 1 | sed 's/ Version: /,/' >> $log_file +echo UKFTractography,$(cd $(dirname `which UKFTractography`) && git rev-parse --short=7 HEAD) >> $log_file +echo dcm2niix,`dcm2niix --version | tail -n 1` >> $log_file # FSL version -hash_line=`eddy_openmp --help 2>&1 | grep "Part of FSL"` -IFS=:, read -ra tmp <<< $hash_line -hash=`echo ${tmp[1]} | sed "s/)//"` -echo FSL,$hash >> $log_file +echo FSL,`cat $FSLDIR/etc/fslversion` >> $log_file # FreeSurfer version echo FreeSurfer,`cat $FREESURFER_HOME/build-stamp.txt` >> $log_file +# Linux version +echo Computer,`cat /etc/system-release` `uname -nr` >> $log_file + +# NVIDIA version +echo NVIDIA,`nvidia-smi | grep NVIDIA-SMI` >> $log_file + +# GPUs +echo GPUs,`nvidia-smi -L` >> $log_file + diff --git a/workflows/hcp_pnl_topup.lsf b/workflows/hcp_pnl_topup.lsf index a7a8acc..0ee49d0 100755 --- a/workflows/hcp_pnl_topup.lsf +++ 
b/workflows/hcp_pnl_topup.lsf @@ -1,84 +1,75 @@ #!/usr/bin/bash -# Useful wiki for executing this in PNL GPU machines -# https://github.com/pnlbwh/luigi-pnlpipe/wiki/Run-HCP-pipeline-on-PNL-GPU-machines-in-a-parallel-manner +# Copy this script to your project directory and edit as needed +# There are no arguments to this script. Execute it as ./hcp_pnl_topup.lsf -: << COMMENT -Running this script w/o GPU support would take a long time (>24 hours). -So always try to use a GPU enabled machine e.g. grx** nodes, eristwo gpu queue. -=> Copy this script to your project directory and update the marked variables. -=> Execute within shell as ./hcp_pnl_topup.lsf -=> To execute through LSF, adjust [1-N]%2 and "BSUB -n 4" as explained below. -COMMENT +# Useful wiki for executing this script in PNL GPU machines +# https://github.com/pnlbwh/luigi-pnlpipe/wiki/Run-HCP-pipeline-on-PNL-GPU-machines -# ============================================================================== - -#BSUB -J hcp-topup[1-N]%2 -#BSUB -q gpu -#BSUB -m ml001 -#BSUB -R rusage[mem=12000] -#BSUB -o /data/pnl/U01_HCP_Psychosis/data_processing/output/hcp-topup-%J-%I.out -#BSUB -e /data/pnl/U01_HCP_Psychosis/data_processing/output/hcp-topup-%J-%I.err -#BSUB -n 4 +: << COMMENT +HCP toolbox accepts an even number (2 or 4) of opposing templates. +You can write four templates in the following order: -# check LSB_JOBINDEX because it won't exist in /rfanfs/ -if [ ! -z ${LSB_JOBINDEX} ] -then - export CUDA_VISIBLE_DEVICES=$(( ${LSB_JOBINDEX}%2 )) -fi + PA template PA template + AP template AP template -: << COMMENT -The formula for CUDA_VISIBLE_DEVICES is ${LSB_JOBINDEX}%G, -which also means the maximum number of parallel cases you can process is G +Or two templates in the following order: -Adjust "BSUB -n 4" in a way that each GPU device can run no more than one job. 
-You can use the formula "BSUB -n N/G" to ensure that where-- - - N is the maximum number of jobs for that node - - G is the number of GPUs in that node -Otherwise your jobs might crash due to out of memory error. + PA template AP template -Example: node ml001 has 8(=N) job slots and 2(=G) GPUs so "BSUB -n 8/2" and "BSUB -J hcp-topup[1-N]%2" +Notes: + * Even if one of the templates is a single b0, name it like DWI with _dwi.nii.gz suffix. + * All b0 volumes need to be accompanied by .bval and .bvec files. Usually, the .bvec file is in 3xN format. COMMENT - bids_data_dir=/data/pnl/U01_HCP_Psychosis/data_processing/BIDS/rawdata -# write four templates in the following order -# change ses-1 in templates to the session you are processing -# PA template, PA template -# AP template, AP template -raw_template="sub-*/ses-1/dwi/*_ses-1_acq-PA_dir-99_dwi.nii.gz sub-*/ses-1/dwi/*_ses-1_acq-PA_dir-107_dwi.nii.gz \ - sub-*/ses-1/dwi/*_ses-1_acq-AP_dir-99_dwi.nii.gz sub-*/ses-1/dwi/*_ses-1_acq-AP_dir-107_dwi.nii.gz" -unr_template="*_ses-1_acq-PA_dir-99_desc-XcUn_dwi.nii.gz *_ses-1_acq-PA_dir-107_desc-XcUn_dwi.nii.gz \ - *_ses-1_acq-AP_dir-99_desc-XcUn_dwi.nii.gz *_ses-1_acq-AP_dir-107_desc-XcUn_dwi.nii.gz" +raw_template="sub-*/ses-*/dwi/*_acq-PA_dir-99_dwi.nii.gz sub-*/ses-*/dwi/*_acq-PA_dir-107_dwi.nii.gz \ + sub-*/ses-*/dwi/*_acq-AP_dir-99_dwi.nii.gz sub-*/ses-*/dwi/*_acq-AP_dir-107_dwi.nii.gz" -# a single caseid or a text file with list of cases +# a single case id or a text file with list of cases caselist=1004 +# a single session id +s=01 LUIGI_CONFIG_PATH=/data/pnl/soft/pnlpipe3/luigi-pnlpipe/params/hcp/dwi_pipe_params.cfg +CUDA_VERSION=10.2 # task is one of {HcpPipe,Ukf,Wma800} task=HcpPipe -# ============================================================================== -HcpOutDir=hcppipe +# You should not edit anything beyond this line +# ============================================================================== -export 
HCPPIPEDIR=/data/pnl/soft/pnlpipe3/HCPpipelines -export HCPPIPEDIR_Config=/data/pnl/soft/pnlpipe3/HCPpipelines/global/config -export HCPPIPEDIR_Global=/data/pnl/soft/pnlpipe3/HCPpipelines/global/scripts -export LUIGI_CONFIG_PATH cluster=`hostname | grep pnl-.*.partners.org` if [ -z $cluster ] then - source /data/pnl/soft/pnlpipe3/bashrc3-gpu + SOFTDIR=/data/pnl/soft/pnlpipe3 + source ${SOFTDIR}/bashrc3-gpu else - source /rfanfs/pnl-zorro/software/pnlpipe3/bashrc3-gpu + SOFTDIR=/software/rocky9/ + source ${SOFTDIR}/bashrc9 +fi + + +HcpOutDir=hcppipe + +export HCPPIPEDIR=${SOFTDIR}/HCPpipelines +export HCPPIPEDIR_Config=${SOFTDIR}/HCPpipelines/global/config +export HCPPIPEDIR_Global=${SOFTDIR}/HCPpipelines/global/scripts + +export LUIGI_CONFIG_PATH + +# check LSB_JOBINDEX because it won't exist in /rfanfs/ +if [ ! -z ${LSB_JOBINDEX} ] +then + export CUDA_VISIBLE_DEVICES=$(( ${LSB_JOBINDEX}%2 )) fi if [ -f ${caselist} ] @@ -90,24 +81,32 @@ else id=${caselist} fi - # luigi-pnlpipe upto GibbsUn so data can be organized according to BIDS for j in $(echo $raw_template) do - /data/pnl/soft/pnlpipe3/luigi-pnlpipe/workflows/ExecuteTask.py \ + ${SOFTDIR}/luigi-pnlpipe/workflows/ExecuteTask.py \ --bids-data-dir $bids_data_dir \ - --task GibbsUn -c $id \ + --task GibbsUn -c $id -s $s \ --dwi-template $j done # determine --path for HCP pipe template=($raw_template) -subdir=`echo ${template[0]} | sed "s+sub-\*+sub-$id+g"` -datadir=`dirname $bids_data_dir`/derivatives/pnlpipe/`dirname $subdir` +_subdir=`echo ${template[0]} | sed "s+sub-\*+sub-$id+g"` +subdir=`echo ${_subdir} | sed "s+ses-\*+ses-$s+g"` +datadir=`dirname $bids_data_dir`/derivatives/$USER-pnlpipe/`dirname $subdir` echo "HCP pipe data directory: $datadir" +template=() +for t in $(echo $raw_template) +do + base=`basename $t` + template+=(${base//_dwi/_desc-XcUn_dwi}) +done + + # HCP pipe using GibbsUn data if [ ! 
-f $datadir/$HcpOutDir/Diffusion/eddy/eddy_unwarped_images.nii.gz ] then @@ -117,23 +116,39 @@ then Command being issued to HCP pipeline: """ - template=($unr_template) - cmd="$HCPPIPEDIR/DiffusionPreprocessing/DiffPreprocPipeline.sh --path=$datadir \ - --subject=$HcpOutDir --cuda-version=9.1 \ - --posData=`ls $datadir/${template[0]}`@`ls $datadir/${template[1]}` \ - --negData=`ls $datadir/${template[2]}`@`ls $datadir/${template[3]}` \ - --echospacing=0.689998 --PEdir=2 --gdcoeffs=NONE \ - --extra-eddy-arg=--data_is_shelled --extra-eddy-arg=--repol --extra-eddy-arg=--verbose" + + if [ ${#template[@]} == 4 ] + then + cmd="$HCPPIPEDIR/DiffusionPreprocessing/DiffPreprocPipeline.sh --path=$datadir \ + --subject=$HcpOutDir --cuda-version=$CUDA_VERSION \ + --posData=`ls $datadir/${template[0]}`@`ls $datadir/${template[1]}` \ + --negData=`ls $datadir/${template[2]}`@`ls $datadir/${template[3]}` \ + --echospacing=0.689998 --PEdir=2 --gdcoeffs=NONE \ + --extra-eddy-arg=--data_is_shelled --extra-eddy-arg=--repol --extra-eddy-arg=--verbose" + elif [ ${#template[@]} == 2 ] + then + cmd="$HCPPIPEDIR/DiffusionPreprocessing/DiffPreprocPipeline.sh --path=$datadir \ + --subject=$HcpOutDir --cuda-version=$CUDA_VERSION \ + --posData=`ls $datadir/${template[0]}` \ + --negData=`ls $datadir/${template[1]}` \ + --echospacing=0.689998 --PEdir=2 --gdcoeffs=NONE \ + --extra-eddy-arg=--data_is_shelled --extra-eddy-arg=--repol --extra-eddy-arg=--verbose" + else + echo Unknown number of --posData, --negData. + echo Execute $HCPPIPEDIR/DiffusionPreprocessing/DiffPreprocPipeline.sh manually. 
+ exit 1 + fi + echo $cmd echo '' $cmd || exit 1 + fi -# create symlinks template=($raw_template) -/data/pnl/soft/pnlpipe3/luigi-pnlpipe/workflows/ExecuteTask.py \ +${SOFTDIR}/luigi-pnlpipe/workflows/ExecuteTask.py \ --bids-data-dir $bids_data_dir \ --dwi-template ${template[0]} \ ---task $task -c $id +--task $task -c $id -s $s diff --git a/workflows/struct_pipe.py b/workflows/struct_pipe.py index fbbc1f3..0a96e2d 100755 --- a/workflows/struct_pipe.py +++ b/workflows/struct_pipe.py @@ -50,7 +50,7 @@ def run(self): def output(self): subject_dir= dirname(self.input().replace('rawdata', self.derivatives_dir)) - prefix= self.input().basename + prefix= self.input().name if '_T1w' in prefix: return local.path(pjoin(subject_dir, prefix.split('_T1w.nii')[0]+ '_desc-Xc_T1w.nii.gz')) @@ -134,7 +134,7 @@ def run(self): def output(self): - prefix= self.input().basename + prefix= self.input().name if self.mask_method.lower() in ['mabs','hd-bet']: desc= 'T1wXcMabs' if '_T1w' in prefix else 'T2wXcMabs' @@ -211,7 +211,7 @@ def run(self): def output(self): - prefix= self.input()['aligned'].basename + prefix= self.input()['aligned'].name if '_T1w' in prefix: outPrefix= pjoin(self.input()['aligned'].dirname, prefix.split('_T1w.nii')[0])