diff --git a/Dockerfile b/Dockerfile
index b512ffb..d935d7a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -24,6 +24,4 @@ RUN mkdir /code
 RUN chmod +x analysis_script.sh
 
-CMD ["/analysis_script.sh"]
-
-ENTRYPOINT []
+ENTRYPOINT ["/analysis_script.sh"]
diff --git a/README.md b/README.md
index bbd9e55..a732850 100644
--- a/README.md
+++ b/README.md
@@ -1,37 +1,106 @@
 # Enigma-PD-WML
 
-Segment White Mater Lesions (WML) in T1-weighted and FLAIR MRI images using FSL and U-Net
+Segment White Matter Lesions (WML) in T1-weighted and FLAIR MRI images using FSL and U-Net
 
-## Build and run the docker container
+## What does the pipeline do?
 
-- Clone this repository
+This pipeline allows white matter lesions (WMLs) to be segmented from a subject's T1-weighted and FLAIR MRI images from
+the same scanning session. The analysis steps (including pre- and post-processing) make use of the following tools:
+
+- [FSL (FMRIB Software Library)](https://fsl.fmrib.ox.ac.uk/fsl/docs/): a library of analysis tools for FMRI, MRI and
+  diffusion brain imaging data.
+
+- [UNet-pgs](https://www.sciencedirect.com/science/article/pii/S1053811921004171?via%3Dihub): a segmentation pipeline
+  for white matter hyperintensities (WMHs) using U-Net.
+
+- [MRIcroGL](https://www.nitrc.org/projects/mricrogl): a tool for converting DICOM images to NIfTI format.
+
+The pipeline is available as a [Docker](https://www.docker.com/) or [Apptainer](https://apptainer.org/) container,
+allowing it to be run on many different systems.
+
+## Installation
+
+If your MRI data isn't in NIfTI format, install [MRIcroGL from their website](https://www.nitrc.org/projects/mricrogl).
+
+If you want to run the container via Docker, install [Docker Desktop](https://docs.docker.com/get-started/get-docker/).
+They have installation instructions for [Mac](https://docs.docker.com/desktop/install/mac-install/),
+[Windows](https://docs.docker.com/desktop/install/windows-install/) and
+[Linux](https://docs.docker.com/desktop/install/linux-install/) systems.
+
+If you want to use Apptainer instead, follow the
+[installation instructions on their website](https://apptainer.org/docs/user/main/quick_start.html).
+
+## Build the Docker / Apptainer image
+
+To obtain the image (for Docker or Apptainer), you have the following options:
+
+- Use the image from Docker Hub
+- Build the image from source
+
+### Use the image from Docker Hub
+
+The image is available on Docker Hub in the
+[enigma-pd-wml repository](https://hub.docker.com/r/hamiedaharoon24/enigma-pd-wml/tags).
+
+If you want to run the container via Docker, you can download it by running:
+
+```bash
+docker pull hamiedaharoon24/enigma-pd-wml:latest
+```
+
+If you want to run the container via Apptainer instead, use:
+
+```bash
+apptainer build enigma-pd-wml.sif docker://hamiedaharoon24/enigma-pd-wml:latest
+```
+
+### Build the image from source
+
+Clone this GitHub repository with:
 
 ```bash
 git clone https://github.com/UCL-ARC/Enigma-PD-WML.git
 ```
 
-- Build the docker image
+Build the Docker image with:
 
 ```bash
 cd Enigma-PD-WML
-docker build -f Dockerfile -t fsl_test .
+docker build -f Dockerfile -t enigma-pd-wml .
 ```
 
-- Create `code` and `data` directories inside the `Enigma-PD-WML` directory
+If you want to run the container via Apptainer, you can convert this Docker image into an Apptainer one via:
 
-- Create a `subjects.txt` file at `Enigma-PD-WML/data/subjects.txt`.
-  This file should contain subject identifiers (one per line).
+```bash
+docker image save enigma-pd-wml -o enigma-pd-wml.tar
+apptainer build enigma-pd-wml.sif docker-archive:enigma-pd-wml.tar
+```
+
+## Prepare your image data
+
+### Convert to NIfTI format
+
+If your images aren't in NIfTI format, you can use [MRIcroGL](https://www.nitrc.org/projects/mricrogl) to convert them.
+
+### Make directory structure
+
+Create a directory (anywhere on your computer) to hold your input image data and the generated results.
+
+Inside this directory:
+
+- Create `code` and `data` directories. The `code` folder should remain empty.
+
+- Inside the `data` folder, create a `subjects.txt` file that contains subject identifiers (one per line).
 
 - For each subject id:
-  - Create a directory at `Enigma-PD-WML/data/subject-id` (replacing 'subject-id' with the relevant id from
-    your `subjects.txt` file)
+  - Create a directory at `data/subject-id` (replacing 'subject-id' with the relevant id from your `subjects.txt` file)
   - Create a sub-directory inside the 'subject-id' directory called `niftis`.
   - Inside `niftis` place the subject's T1 MRI scan and FLAIR MRI scan. Both these files should be in NIfTI format
-    (ending `.nii.gz`) and contain either `T1` or `FLAIR` in their name respectively.
+    (ending `.nii.gz`) and contain either `T1` or `FLAIR` in their name respectively.
 
-- Your final file structure should look like below (for two example subject ids):
+Your final file structure should look like the following (for two example subject ids):
 
 ```bash
 Enigma-PD-WML
@@ -51,8 +120,39 @@ Enigma-PD-WML
 └───subjects.txt
 ```
 
-- Run the docker container. Make sure you are in the `Enigma-PD-WML` directory when you run this command.
+## Run the container
+
+Below are various ways to run the container. For each, make sure you run the command from the top level of the
+directory you made in the last section.
+
+By default, the pipeline will process your samples sequentially on 1 core. If you want to process them in parallel, add
+the `-n` option to the end of the command:
+
+```bash
+# Run with 5 jobs
+-n 5
+```
+
+The value after `-n` sets the number of jobs that run in parallel (a good default is the number of cores on your
+system).
+
+### Via Docker (using the image from Docker Hub)
+
+```bash
+docker run -v "$(pwd)":/home -v "$(pwd)"/code:/code -v "$(pwd)"/data:/data hamiedaharoon24/enigma-pd-wml
+```
+
+### Via Docker (using the image built from source)
+
+```bash
+docker run -v "$(pwd)":/home -v "$(pwd)"/code:/code -v "$(pwd)"/data:/data enigma-pd-wml
+```
+
+### Via Apptainer
+
+You'll need to put the `.sif` file in the top level of the directory you made in the last section, or provide the full
+path to its location.
 
 ```bash
-./docker_runscript.sh
+apptainer run --bind ${PWD}:/home --bind ${PWD}/code:/code --bind ${PWD}/data:/data enigma-pd-wml.sif
 ```
diff --git a/analysis_script.sh b/analysis_script.sh
index ba73601..c1a0dc1 100644
--- a/analysis_script.sh
+++ b/analysis_script.sh
@@ -1,56 +1,42 @@
 #!/bin/bash
 
-# May need to add subjid to output log.
-
-# FSL Setup
-FSLDIR=/usr/local/fsl
-PATH=${FSLDIR}/share/fsl/bin:${PATH}
-export FSLDIR PATH
-. ${FSLDIR}/etc/fslconf/fsl.sh
-set -exo
-
-# assign paths for code and input data directories
-export code_dir=/code
-export data_path=/data
-echo code_dir : ${code_dir} >> ${code_dir}/logs.txt 2>&1
-echo data_path : ${data_path} >> ${code_dir}/logs.txt 2>&1
-echo >> ${code_dir}/logs.txt 2>&1
-
-# assign path and filename of the list of subject IDs saved as a text file
-export subjids_list=${data_path}/subjects.txt
-echo subjids_list : ${subjids_list} >> ${code_dir}/logs.txt 2>&1
+set -euxo pipefail
 
 function runAnalysis (){
+    # set options are lost when running this function in parallel,
+    # so set them again here
+    set -euxo pipefail
+
     export subjid=$1
-    echo subjid : ${subjid} >> ${code_dir}/logs.txt 2>&1
-    echo >> ${code_dir}/logs.txt 2>&1
+    echo subjid : ${subjid}
+    echo
 
     # search full paths and filenames for input T1 and FLAIR images in compressed NIfTI format
     t1_fn=`find ${data_path}/${subjid}/niftis/*[Tt]1*.nii.gz`
     export t1_fn
     flair_fn=`find ${data_path}/${subjid}/niftis/*[Ff][Ll][Aa][Ii][Rr]*.nii.gz`
    export flair_fn
-    echo t1_fn : ${t1_fn} >> ${code_dir}/logs.txt 2>&1
-    echo flair_fn : ${flair_fn} >> ${code_dir}/logs.txt 2>&1
-    echo >> ${code_dir}/logs.txt 2>&1
+    echo t1_fn : ${t1_fn}
+    echo flair_fn : ${flair_fn}
+    echo
 
     # assign path for output data directory and create it (if it doesn't exist)
     export data_outpath=${data_path}/UNet-pgs/${subjid}
-    mkdir -p ${data_outpath} >> ${code_dir}/logs.txt 2>&1
-    echo data_outpath : ${data_outpath} >> ${code_dir}/logs.txt 2>&1
+    mkdir -p ${data_outpath}
+    echo data_outpath : ${data_outpath}
 
     # REL # Why under code dir?
     # assign path for a temporary data directory under the code directory and create it
     export temp_dir=${code_dir}/Controls+PD/${subjid}
-    mkdir -p ${temp_dir} >> ${code_dir}/logs.txt 2>&1
-    echo temp_dir : ${temp_dir} >> ${code_dir}/logs.txt 2>&1
-    echo >> ${code_dir}/logs.txt 2>&1
+    mkdir -p ${temp_dir}
+    echo temp_dir : ${temp_dir}
+    echo
 
     # change into temporary data directory and create input and output subdirectories
     # directories are required by flair
     cd ${temp_dir}
-    mkdir -p ${temp_dir}/input >> ${code_dir}/logs.txt 2>&1
-    mkdir -p ${temp_dir}/output >> ${code_dir}/logs.txt 2>&1
+    mkdir -p ${temp_dir}/input
+    mkdir -p ${temp_dir}/output
 
     # change into input directory ${temp_dir}/input
     # flirt expects to be run in the same dir (maybe able to do this
@@ -60,49 +46,47 @@ function runAnalysis (){
     # copy input T1 and FLAIR images here, renaming them
     # files need to be renamed, otherwise they are overwritten when fslroi is called.
     # also need to keep original file for flirt command
-    cp ${t1_fn} t1vol_orig.nii.gz >> ${code_dir}/logs.txt 2>&1
-    cp ${flair_fn} flairvol_orig.nii.gz >> ${code_dir}/logs.txt 2>&1
+    cp ${t1_fn} t1vol_orig.nii.gz
+    cp ${flair_fn} flairvol_orig.nii.gz
 
     # run FSL's fsl_anat tool on the input T1 image, with outputs
     # saved to a new subdirectory ${temp_dir}/input/t1-mni.anat
-    echo running fsl_anat on t1 in ${temp_dir}/input/t1-mni.anat/ >> ${code_dir}/logs.txt 2>&1
+    echo running fsl_anat on t1 in ${temp_dir}/input/t1-mni.anat/
     # these flags stop fsl_anat going through unnecessary steps and generating outputs we don't use
-    fsl_anat -o t1-mni -i ./t1vol_orig.nii.gz --nosubcortseg >> ${code_dir}/logs.txt 2>&1
+    fsl_anat -o t1-mni -i ./t1vol_orig.nii.gz --nosubcortseg
 
-
-
-    echo "fsl_anat done" >> ${code_dir}/logs.txt 2>&1
-    echo >> ${code_dir}/logs.txt 2>&1
+    echo "fsl_anat done"
+    echo
 
     # create new subdirectory to pre-process input FLAIR image, change
     # into it ${temp_dir}/input/flair-bet
-    mkdir ${temp_dir}/input/flair-bet >> ${code_dir}/logs.txt 2>&1
+    mkdir ${temp_dir}/input/flair-bet
     cd ${temp_dir}/input/flair-bet
 
     # run FSL's tools on the input FLAIR image to ensure MNI orientation, followed by brain extraction
-    echo preparing flair in ${temp_dir}/input/flair-bet/ >> ${code_dir}/logs.txt 2>&1
+    echo preparing flair in ${temp_dir}/input/flair-bet/
     fslreorient2std -m flair_orig2std.mat ../flairvol_orig.nii.gz flairvol
-    bet flairvol.nii.gz flairvol_brain -m -R -S -B -Z -v >> ${code_dir}/logs.txt 2>&1
+    bet flairvol.nii.gz flairvol_brain -m -R -S -B -Z -v
 
     # run FSL's flirt tool to register/align FLAIR brain with T1 brain
     flirt -in flairvol_brain.nii.gz -omat flairbrain2t1brain.mat \
     -out flairbrain2t1brain \
     -bins 256 -cost normmi -searchrx 0 0 -searchry 0 0 -searchrz 0 0 -dof 6 \
-    -interp trilinear -ref ../t1-mni.anat/T1_biascorr_brain.nii.gz >> ${code_dir}/logs.txt 2>&1
+    -interp trilinear -ref ../t1-mni.anat/T1_biascorr_brain.nii.gz
 
     # run FSL's flirt tool to transform/align input FLAIR image (whole head) with T1 brain
     flirt -in flairvol.nii.gz -applyxfm -init flairbrain2t1brain.mat \
     -out flairvol2t1brain \
-    -paddingsize 0.0 -interp trilinear -ref ../t1-mni.anat/T1_biascorr_brain.nii.gz >> ${code_dir}/logs.txt 2>&1
+    -paddingsize 0.0 -interp trilinear -ref ../t1-mni.anat/T1_biascorr_brain.nii.gz
 
     # run FSL's convert_xfm to invert FLAIR to T1 transformation matrix
-    convert_xfm -omat flairbrain2t1brain_inv.mat -inverse flairbrain2t1brain.mat >> ${code_dir}/logs.txt 2>&1
-    echo "flair prep done" >> ${code_dir}/logs.txt 2>&1
-    echo >> ${code_dir}/logs.txt 2>&1
+    convert_xfm -omat flairbrain2t1brain_inv.mat -inverse flairbrain2t1brain.mat
+    echo "flair prep done"
+    echo
 
     # create new subdirectory for a distance map from the ventricles, used to determine periventricular vs deep white matter;
     # change into it ${temp_dir}/input/vent_dist_mapping
-    mkdir ${temp_dir}/input/vent_dist_mapping >> ${code_dir}/logs.txt 2>&1
+    mkdir ${temp_dir}/input/vent_dist_mapping
     cd ${temp_dir}/input/vent_dist_mapping
 
     # copy required images and transformation/warp coefficients from ${temp_dir}/input/t1-mni.anat here
@@ -139,74 +123,74 @@ function runAnalysis (){
     t1size=( $(fslsize ./t1-mni.anat/T1.nii.gz) )
     if [ ${t1size[1]} -ge 500 ] || [ ${t1size[3]} -ge 500 ]
     then
-        fslroi ./t1-mni.anat/T1.nii.gz T1 20 472 8 496 0 -1 >> ${code_dir}/logs.txt 2>&1
-        fslroi ./flair-bet/flairvol_trans2_t1brain.nii.gz FLAIR 20 472 8 496 0 -1 >> ${code_dir}/logs.txt 2>&1
+        fslroi ./t1-mni.anat/T1.nii.gz T1 20 472 8 496 0 -1
+        fslroi ./flair-bet/flairvol_trans2_t1brain.nii.gz FLAIR 20 472 8 496 0 -1
     else
-        cp ./t1-mni.anat/T1.nii.gz T1.nii.gz >> ${code_dir}/logs.txt 2>&1
-        cp ./flair-bet/flairvol2t1brain.nii.gz FLAIR.nii.gz >> ${code_dir}/logs.txt 2>&1
+        cp ./t1-mni.anat/T1.nii.gz T1.nii.gz
+        cp ./flair-bet/flairvol2t1brain.nii.gz FLAIR.nii.gz
     fi
 
     # run FSL's flirt tool to register/align cropped T1 with full-fov T1
     flirt -in T1.nii.gz -omat T1_croppedmore2roi.mat \
     -out T1_croppedmore2roi \
     -bins 256 -cost normmi -searchrx 0 0 -searchry 0 0 -searchrz 0 0 -dof 6 \
-    -interp trilinear -ref ./t1-mni.anat/T1.nii.gz >> ${code_dir}/logs.txt 2>&1
+    -interp trilinear -ref ./t1-mni.anat/T1.nii.gz
 
     # change one directory up to ${temp_dir}
     cd ${temp_dir}
 
     # run UNets-pgs in Singularity
-    echo running UNets-pgs Singularity in ${temp_dir} >> ${code_dir}/logs.txt 2>&1
+    echo running UNets-pgs Singularity in ${temp_dir}
 
-    /WMHs_segmentation_PGS.sh T1.nii.gz FLAIR.nii.gz results.nii.gz ./input ./output >> ${code_dir}/logs.txt 2>&1
+    /WMHs_segmentation_PGS.sh T1.nii.gz FLAIR.nii.gz results.nii.gz ./input ./output
 
-    echo UNets-pgs done! >> ${code_dir}/logs.txt 2>&1
-    echo >> ${code_dir}/logs.txt 2>&1
+    echo UNets-pgs done!
+    echo
 
     # change into output directory ${temp_dir}/output
     cd ${temp_dir}/output
-    echo processing outputs in ${temp_dir}/output/ >> ${code_dir}/logs.txt 2>&1
+    echo processing outputs in ${temp_dir}/output/
 
-    echo "copy required images" >> ${code_dir}/logs.txt 2>&1
+    echo "copy required images"
     # copy required images and transformation/warp coefficients from ${temp_dir}/input here, renaming T1 and FLAIR
-    cp ${temp_dir}/input/T1_croppedmore2roi.mat . >> ${code_dir}/logs.txt 2>&1
-    cp ${temp_dir}/input/t1-mni.anat/T1.nii.gz T1_roi.nii.gz >> ${code_dir}/logs.txt 2>&1
-    cp ${temp_dir}/input/t1-mni.anat/T1_fullfov.nii.gz . >> ${code_dir}/logs.txt 2>&1
-    cp ${temp_dir}/input/t1-mni.anat/T1_to_MNI_lin.mat . >> ${code_dir}/logs.txt 2>&1
-    cp ${temp_dir}/input/t1-mni.anat/T1_to_MNI_nonlin_coeff.nii.gz . >> ${code_dir}/logs.txt 2>&1
-    cp ${temp_dir}/input/t1-mni.anat/T1_roi2nonroi.mat . >> ${code_dir}/logs.txt 2>&1
-    cp ${temp_dir}/input/flair-bet/flairbrain2t1brain_inv.mat . >> ${code_dir}/logs.txt 2>&1
-    cp ${temp_dir}/input/flair-bet/flairvol.nii.gz FLAIR_orig.nii.gz >> ${code_dir}/logs.txt 2>&1
-    cp ${temp_dir}/input/vent_dist_mapping/perivent_t1brain.nii.gz . >> ${code_dir}/logs.txt 2>&1
-    cp ${temp_dir}/input/vent_dist_mapping/dwm_t1brain.nii.gz . >> ${code_dir}/logs.txt 2>&1
-    cp ${temp_dir}/input/vent_dist_mapping/perivent_flairbrain.nii.gz . >> ${code_dir}/logs.txt 2>&1
-    cp ${temp_dir}/input/vent_dist_mapping/dwm_flairbrain.nii.gz . >> ${code_dir}/logs.txt 2>&1
-
-
-    tree ${temp_dir}/input/ >> ${code_dir}/logs.txt 2>&1
+    cp ${temp_dir}/input/T1_croppedmore2roi.mat .
+    cp ${temp_dir}/input/t1-mni.anat/T1.nii.gz T1_roi.nii.gz
+    cp ${temp_dir}/input/t1-mni.anat/T1_fullfov.nii.gz .
+    cp ${temp_dir}/input/t1-mni.anat/T1_to_MNI_lin.mat .
+    cp ${temp_dir}/input/t1-mni.anat/T1_to_MNI_nonlin_coeff.nii.gz .
+    cp ${temp_dir}/input/t1-mni.anat/T1_roi2nonroi.mat .
+    cp ${temp_dir}/input/flair-bet/flairbrain2t1brain_inv.mat .
+    cp ${temp_dir}/input/flair-bet/flairvol.nii.gz FLAIR_orig.nii.gz
+    cp ${temp_dir}/input/vent_dist_mapping/perivent_t1brain.nii.gz .
+    cp ${temp_dir}/input/vent_dist_mapping/dwm_t1brain.nii.gz .
+    cp ${temp_dir}/input/vent_dist_mapping/perivent_flairbrain.nii.gz .
+    cp ${temp_dir}/input/vent_dist_mapping/dwm_flairbrain.nii.gz .
+
+
+    tree ${temp_dir}/input/
 
     # copy MNI T1 template images here
-    cp ${FSLDIR}/data/standard/MNI152_T1_1mm.nii.gz . >> ${code_dir}/logs.txt 2>&1
-    cp ${FSLDIR}/data/standard/MNI152_T1_1mm_brain.nii.gz . >> ${code_dir}/logs.txt 2>&1
+    cp ${FSLDIR}/data/standard/MNI152_T1_1mm.nii.gz .
+    cp ${FSLDIR}/data/standard/MNI152_T1_1mm_brain.nii.gz .
 
-    echo "STEP 01" >> ${code_dir}/logs.txt 2>&1
+    echo "STEP 01"
 
     # run FSL's flirt tool to transform/align WML segmentations from UNets-pgs with roi-cropped T1
     flirt -in results.nii.gz -applyxfm -init T1_croppedmore2roi.mat \
     -out results2t1roi \
-    -paddingsize 0.0 -interp nearestneighbour -ref T1_roi.nii.gz >> ${code_dir}/logs.txt 2>&1
+    -paddingsize 0.0 -interp nearestneighbour -ref T1_roi.nii.gz
 
-    echo "STEP 02" >> ${code_dir}/logs.txt 2>&1
+    echo "STEP 02"
 
     # run FSL's flirt tool to transform/align WML segmentations from UNets-pgs with full-fov T1
     flirt -in results2t1roi.nii.gz -applyxfm -init T1_roi2nonroi.mat \
     -out results2t1fullfov \
-    -paddingsize 0.0 -interp nearestneighbour -ref T1_fullfov.nii.gz >> ${code_dir}/logs.txt 2>&1
+    -paddingsize 0.0 -interp nearestneighbour -ref T1_fullfov.nii.gz
 
-    echo "STEP 03" >> ${code_dir}/logs.txt 2>&1
+    echo "STEP 03"
 
     # run FSL's flirt tool to transform/align WML segmentations with full-fov FLAIR
     flirt -in results2t1roi.nii.gz -applyxfm -init flairbrain2t1brain_inv.mat \
     -out results2flairfullfov \
-    -paddingsize 0.0 -interp nearestneighbour -ref FLAIR_orig.nii.gz >> ${code_dir}/logs.txt 2>&1
+    -paddingsize 0.0 -interp nearestneighbour -ref FLAIR_orig.nii.gz
 
     # run FSL's fslmaths tool to divide WML segmentations from UNets-pgs into periventricular and deep white matter
     fslmaths results2t1roi.nii.gz -mul perivent_t1brain.nii.gz results2t1roi_perivent
@@ -215,69 +199,110 @@ function runAnalysis (){
     fslmaths results2flairfullfov.nii.gz -mul dwm_flairbrain.nii.gz results2flairfullfov_deep
 
-    echo "STEP 04" >> ${code_dir}/logs.txt 2>&1
+    echo "STEP 04"
 
     # run FSL's flirt tool to linearly transform/align WML segmentations with MNI T1
     flirt -in results2t1roi.nii.gz -applyxfm -init T1_to_MNI_lin.mat \
     -out results2mni_lin \
-    -paddingsize 0.0 -interp nearestneighbour -ref MNI152_T1_1mm_brain.nii.gz >> ${code_dir}/logs.txt 2>&1
+    -paddingsize 0.0 -interp nearestneighbour -ref MNI152_T1_1mm_brain.nii.gz
 
     flirt -in results2t1roi_perivent.nii.gz -applyxfm -init T1_to_MNI_lin.mat \
     -out results2mni_lin_perivent \
-    -paddingsize 0.0 -interp nearestneighbour -ref MNI152_T1_1mm_brain.nii.gz >> ${code_dir}/logs.txt 2>&1
+    -paddingsize 0.0 -interp nearestneighbour -ref MNI152_T1_1mm_brain.nii.gz
 
     flirt -in results2t1roi_deep.nii.gz -applyxfm -init T1_to_MNI_lin.mat \
     -out results2mni_lin_deep \
-    -paddingsize 0.0 -interp nearestneighbour -ref MNI152_T1_1mm_brain.nii.gz >> ${code_dir}/logs.txt 2>&1
+    -paddingsize 0.0 -interp nearestneighbour -ref MNI152_T1_1mm_brain.nii.gz
 
-    echo "STEP 05" >> ${code_dir}/logs.txt 2>&1
+    echo "STEP 05"
 
     # run FSL's applywarp tool to nonlinearly warp WML segmentations with MNI T1
     applywarp --in=results2t1roi.nii.gz --warp=T1_to_MNI_nonlin_coeff.nii.gz \
     --out=results2mni_nonlin \
-    --interp=nn --ref=${FSLDIR}/data/standard/MNI152_T1_1mm_brain.nii.gz >> ${code_dir}/logs.txt 2>&1
+    --interp=nn --ref=${FSLDIR}/data/standard/MNI152_T1_1mm_brain.nii.gz
 
     applywarp --in=results2t1roi_perivent.nii.gz --warp=T1_to_MNI_nonlin_coeff.nii.gz \
     --out=results2mni_nonlin_perivent \
-    --interp=nn --ref=${FSLDIR}/data/standard/MNI152_T1_1mm_brain.nii.gz >> ${code_dir}/logs.txt 2>&1
+    --interp=nn --ref=${FSLDIR}/data/standard/MNI152_T1_1mm_brain.nii.gz
 
     applywarp --in=results2t1roi_deep.nii.gz --warp=T1_to_MNI_nonlin_coeff.nii.gz \
     --out=results2mni_nonlin_deep \
-    --interp=nn --ref=${FSLDIR}/data/standard/MNI152_T1_1mm_brain.nii.gz >> ${code_dir}/logs.txt 2>&1
+    --interp=nn --ref=${FSLDIR}/data/standard/MNI152_T1_1mm_brain.nii.gz
 
     # copy all contents of temporary data directory to output data directory, and delete temporary data directory
-    echo copying all contents >> ${code_dir}/logs.txt 2>&1
-    echo from ${temp_dir} >> ${code_dir}/logs.txt 2>&1
-    echo to ${data_outpath} >> ${code_dir}/logs.txt 2>&1
-    cp -r ${temp_dir}/* ${data_outpath} >> ${code_dir}/logs.txt 2>&1
+    echo copying all contents
+    echo from ${temp_dir}
+    echo to ${data_outpath}
+    cp -r ${temp_dir}/* ${data_outpath}
 
     # echo deleting ${temp_dir}
     # rm -r ${temp_dir}
 
-    echo all done! >> ${code_dir}/logs.txt 2>&1
-    echo >> ${code_dir}/logs.txt 2>&1
+    echo all done!
+    echo
 
     # change to ${data_outpath}
     cd ${data_outpath}
 
-    zip -u ${subjid}_results.zip ./output/results2mni_lin*.nii.gz ./output/results2mni_nonlin*.nii.gz
+    zip -uq ${subjid}_results.zip ./output/results2mni_lin*.nii.gz ./output/results2mni_nonlin*.nii.gz
 
-    echo ===================================================== >> ${code_dir}/logs.txt 2>&1
-    echo please send this zip file to the ENIGMA-PD-Vasc team! >> ${code_dir}/logs.txt 2>&1
-    echo ${data_outpath}/${subjid}_results.zip >> ${code_dir}/logs.txt 2>&1
-    echo ===================================================== >> ${code_dir}/logs.txt 2>&1
-    echo >> ${code_dir}/logs.txt 2>&1
-    echo Thank you! >> ${code_dir}/logs.txt 2>&1
-    echo >> ${code_dir}/logs.txt 2>&1
+    echo =====================================================
+    echo please send this zip file to the ENIGMA-PD-Vasc team!
+    echo ${data_outpath}/${subjid}_results.zip
+    echo =====================================================
+    echo
+    echo Thank you!
+    echo
 }
 
-# RUN IN SERIAL
-#for subjid in `cat ${subjids_list}`;
-#do runAnalysis $subjid
-#done
+# assign paths for code and input data directories, as well as the overall log file
+export code_dir=/code
+export data_path=/data
+overall_log=${code_dir}/overall_log.txt
 
-# RUN IN PARALLEL
-export -f runAnalysis
-cat ${subjids_list} | parallel runAnalysis
+echo code_dir : ${code_dir} >> $overall_log 2>&1
+echo data_path : ${data_path} >> $overall_log 2>&1
+echo >> $overall_log 2>&1
 
-#done;
+# FSL Setup
+FSLDIR=/usr/local/fsl
+PATH=${FSLDIR}/share/fsl/bin:${PATH}
+export FSLDIR PATH
+. ${FSLDIR}/etc/fslconf/fsl.sh
+
+# Create dir to hold sample logs
+sample_logs_dir=${code_dir}/sample_logs
+mkdir -p $sample_logs_dir >> $overall_log 2>&1
+
+# assign path and filename of the list of subject IDs saved as a text file
+export subjids_list=${data_path}/subjects.txt
+echo subjids_list : ${subjids_list} >> $overall_log 2>&1
+
+# Read the -n argument, which gives the number of jobs to use for parallel processing
+# If n=1 (or isn't specified), run sequentially
+n=1
+# the leading colon puts getopts in silent mode, so an invalid option sets
+# OPTARG to the offending character rather than leaving it unset (which
+# would trip set -u)
+while getopts ":n:" opt; do
+    case ${opt} in
+        n)
+            n=${OPTARG}
+            ;;
+        \?)
+            echo "Invalid option: -${OPTARG}." >> $overall_log 2>&1
+            exit 1
+            ;;
+    esac
+done
+
+if [[ $n -eq 1 ]]
+then
+    echo "Running sequentially on 1 core" >> $overall_log 2>&1
+    for subjid in $(cat ${subjids_list});
+    do
+        echo "Processing sample with id ${subjid}" >> $overall_log 2>&1
+        runAnalysis $subjid > $sample_logs_dir/$subjid-log.txt 2>&1
+    done
+else
+    echo "Running in parallel with ${n} jobs" >> $overall_log 2>&1
+    export -f runAnalysis
+    cat ${subjids_list} | parallel --jobs ${n} runAnalysis {} ">" $sample_logs_dir/{}-log.txt "2>&1"
+fi
diff --git a/docker_buildscript.sh b/docker_buildscript.sh
index 6d03ac8..bbcaa6e 100644
--- a/docker_buildscript.sh
+++ b/docker_buildscript.sh
@@ -1 +1 @@
-docker build -f Dockerfile -t fsl_test .
+docker build -f Dockerfile -t enigma-pd-wml .
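With the logging rework in `analysis_script.sh`, output is split into an overall log plus one log per sample, all written under the mounted `code` directory. A minimal sketch of following progress from the host while the container runs, assuming the bind mounts shown in the README and a hypothetical subject id `sub-001`:

```bash
# overall pipeline log (code/ on the host is bind-mounted to /code in the container)
tail -f code/overall_log.txt

# per-sample log written by runAnalysis for the hypothetical subject sub-001
tail -f code/sample_logs/sub-001-log.txt
```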
diff --git a/docker_runscript.sh b/docker_runscript.sh
index 4e75cf6..bb19666 100644
--- a/docker_runscript.sh
+++ b/docker_runscript.sh
@@ -1,3 +1,3 @@
 date > docker_log.txt
-docker run -v "$(pwd)":/home -v "$(pwd)"/code:/code -v "$(pwd)"/data:/data fsl_test >> docker_log.txt 2>&1
+docker run -v "$(pwd)":/home -v "$(pwd)"/code:/code -v "$(pwd)"/data:/data enigma-pd-wml >> docker_log.txt 2>&1
 date >> docker_log.txt
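Because the Dockerfile now declares `/analysis_script.sh` as the `ENTRYPOINT`, anything placed after the image name in a `docker run` (or after the `.sif` file in an `apptainer run`) is forwarded as arguments to the script's `getopts` loop; this is how the README's `-n` option reaches the pipeline. A minimal sketch of the README's run commands with the `-n` flag appended (the job count of 5 is illustrative):

```bash
# Docker: process samples with 5 parallel jobs
docker run -v "$(pwd)":/home -v "$(pwd)"/code:/code -v "$(pwd)"/data:/data enigma-pd-wml -n 5

# Apptainer equivalent
apptainer run --bind ${PWD}:/home --bind ${PWD}/code:/code --bind ${PWD}/data:/data enigma-pd-wml.sif -n 5
```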