diff --git a/Dockerfile b/Dockerfile index 20177a8..4fd474c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -94,12 +94,9 @@ RUN wget afni.nimh.nih.gov/pub/dist/tgz/linux_ubuntu_16_64.tgz \ && tar -xzf linux_ubuntu_16_64.tgz -C $HOME/afni \ && rm -rf linux_ubuntu_16_64.tgz -ENV AFNIPATH="$HOME/ants-v2.3.1/bin" \ +ENV AFNIPATH="$HOME/afni/linux_ubuntu_16_64/" \ PATH="$HOME/afni/linux_ubuntu_16_64:$PATH" - -### Local CIC VM from https://github.com/CobraLab/MINC-VM/blob/master/provision.sh - RUN apt-get update && \ apt-get install -y --no-install-recommends htop nano wget imagemagick parallel zram-config debconf @@ -111,6 +108,8 @@ RUN echo ttf-mscorefonts-installer msttcorefonts/accepted-mscorefonts-eula selec zenity libcurl4-openssl-dev bc gawk libxkbcommon-x11-0 \ ttf-mscorefonts-installer bc +#Install python environment + ENV CONDA_DIR="$HOME/miniconda-latest" \ PATH="$HOME/miniconda-latest/bin:$PATH" \ ND_ENTRYPOINT="$HOME/startup.sh" @@ -131,111 +130,6 @@ RUN export PATH="$HOME/miniconda-latest/bin:$PATH" \ && rm -rf ~/.cache/pip/* \ && sync -# set paths -ENV minc_toolkit_v2=https://packages.bic.mni.mcgill.ca/minc-toolkit/Debian/minc-toolkit-1.9.17-20190313-Ubuntu_18.04-x86_64.deb \ - minc_toolkit_v1=https://packages.bic.mni.mcgill.ca/minc-toolkit/Debian/minc-toolkit-1.0.09-20170529-Ubuntu_18.04-x86_64.deb \ - bic_mni_models=http://packages.bic.mni.mcgill.ca/minc-toolkit/Debian/bic-mni-models-0.1.1-20120421.deb \ - beast_library=http://packages.bic.mni.mcgill.ca/minc-toolkit/Debian/beast-library-1.1.0-20121212.deb \ - pyminc=https://github.com/Mouse-Imaging-Centre/pyminc/archive/v0.52.tar.gz \ - minc_stuffs=https://github.com/Mouse-Imaging-Centre/minc-stuffs/archive/v0.1.24.tar.gz \ - pyezminc=https://github.com/BIC-MNI/pyezminc/archive/release-1.2.01.tar.gz \ - quarter=https://github.com/Alexpux/Quarter/archive/master.tar.gz \ - bicinventor=https://github.com/BIC-MNI/bicInventor/archive/master.tar.gz \ - brain_view2=https://github.com/Mouse-Imaging-Centre/brain-view2/archive/master.tar.gz \ - itksnap_minc=http://www.bic.mni.mcgill.ca/~vfonov/temp/itksnap-3.4.0-20151130-Linux-x86_64-qt4.tar.gz \ - mni_cortical_statistics=https://github.com/BIC-MNI/mni.cortical.statistics/archive/ver-0_9_5.tar.gz \ - generate_deformation_fields=https://github.com/Mouse-Imaging-Centre/generate_deformation_fields/archive/1.0.1.tar.gz \ - pydpiper=https://github.com/Mouse-Imaging-Centre/pydpiper/archive/v2.0.13.tar.gz \ - bpipe=https://github.com/ssadedin/bpipe/releases/download/0.9.9.6/bpipe-0.9.9.6.tar.gz - - -#Download and install external debs -RUN wget --progress=dot:mega $minc_toolkit_v2 && \ - wget --progress=dot:mega $minc_toolkit_v1 && \ - wget --progress=dot:mega $bic_mni_models && \ - wget --progress=dot:mega $beast_library && \ - for file in *.deb; do gdebi --n $file; done && \ - rm -f *.deb - - -RUN echo '. /opt/minc/1.9.17/minc-toolkit-config.sh' >> $HOME/.bashrc && \ - echo 'export LD_LIBRARY_PATH=/opt/minc/1.9.17/lib' >> $HOME/.bashrc && \ - echo 'export PATH=/opt/minc-toolkit-extras/:$PATH' >> $HOME/.bashrc - -#Enable minc-toolkit in this script, need to escape error checking -RUN set +u && \ - . 
/opt/minc/1.9.17/minc-toolkit-config.sh && \ - set -u - -#Download other packages -RUN wget --progress=dot:mega $pyminc -O pyminc.tar.gz && \ - wget --progress=dot:mega $minc_stuffs -O minc-stuffs.tar.gz && \ - wget --progress=dot:mega $pyezminc -O pyezminc.tar.gz && \ - wget --progress=dot:mega $generate_deformation_fields -O generate_deformation_fields.tar.gz && \ - wget --progress=dot:mega $pydpiper -O pydpiper.tar.gz && \ - wget --progress=dot:mega $bpipe -O bpipe.tar.gz && \ - wget https://raw.githubusercontent.com/andrewjanke/volgenmodel/master/volgenmodel -O /usr/local/bin/volgenmodel && \ - git clone https://github.com/CobraLab/minc-toolkit-extras.git /opt/minc-toolkit-extras - - -#Do this so that we don't need to keep track of version numbers for build -RUN mkdir -p pyminc && tar xzvf pyminc.tar.gz -C pyminc --strip-components 1 && \ - mkdir -p minc-stuffs && tar xzvf minc-stuffs.tar.gz -C minc-stuffs --strip-components 1 && \ - mkdir -p generate_deformation_fields && tar xzvf generate_deformation_fields.tar.gz -C generate_deformation_fields --strip-components 1 && \ - mkdir -p pyezminc && tar xzvf pyezminc.tar.gz -C pyezminc --strip-components 1 && \ - mkdir -p pydpiper && tar xzvf pydpiper.tar.gz -C pydpiper --strip-components 1 && \ - mkdir -p /opt/bpipe && tar xzvf bpipe.tar.gz -C /opt/bpipe --strip-components 1 && ln -s /opt/bpipe/bin/bpipe /usr/local/bin/bpipe - - -#Build and install packages -RUN ( cd pyezminc && python setup.py install --mincdir /opt/minc/1.9.17 ) && \ - ( cd pyminc && python setup.py install ) && \ - ( cd minc-stuffs && ./autogen.sh && ./configure --with-build-path=/opt/minc/1.9.17 && make && make install && python setup.py install ) && \ - ( cd generate_deformation_fields && ./autogen.sh && ./configure --with-minc2 --with-build-path=/opt/minc/1.9.17 && make && make install) && \ - ( cd generate_deformation_fields/scripts && python setup.py build_ext --inplace && python setup.py install) && \ - ( cd pydpiper && python setup.py install) - -RUN pip install -U qbatch==2.1.5 && \ - rm -rf pyezminc* pyminc* minc-stuffs* generate_deformation_fields* pydpiper* bpipe* - -#Installing brain-view2 -RUN apt install -y --no-install-recommends libcoin80-dev libpcre++-dev qt4-default libqt4-opengl-dev libtool && \ - wget $quarter -O quarter.tar.gz && \ - wget $bicinventor -O bicinventor.tar.gz && \ - wget $brain_view2 -O brain-view2.tar.gz && \ - mkdir quarter && tar xzvf quarter.tar.gz -C quarter --strip-components 1 && \ - mkdir bicinventor && tar xzvf bicinventor.tar.gz -C bicinventor --strip-components 1 && \ - mkdir brain-view2 && tar xzvf brain-view2.tar.gz -C brain-view2 --strip-components 1 && \ - ( cd quarter && cmake . 
&& make && make install ) && \ - ( cd bicinventor && ./autogen.sh && ./configure --with-build-path=/opt/minc/1.9.17 --prefix=/opt/minc/1.9.17 --with-minc2 && make && make install ) && \ - ( cd brain-view2 && /usr/bin/qmake-qt4 MINCDIR=/opt/minc/1.9.17 HDF5DIR=/opt/minc/1.9.17 INVENTORDIR=/opt/minc/1.9.17 && make && cp brain-view2 /opt/minc/1.9.17/bin ) && \ - rm -rf quarter* bicinventor* brain-view2* - -#Install itksnap-MINC -RUN wget $itksnap_minc -O itksnap_minc.tar.gz && \ - tar xzvf itksnap_minc.tar.gz -C /usr/local --strip-components 1 && \ - rm -f itksnap_minc.tar.gz - -ENV export MINC_PATH=/opt/minc/1.9.17 \ - export PATH=${OLDPATH} - -#Purge unneeded packages -RUN apt-get purge $(dpkg -l | tr -s ' ' | cut -d" " -f2 | sed 's/:amd64//g' | grep -e -E '(-dev|-doc)$') - -#Remove a hunk of useless packages which seem to be safe to remove -RUN apt-get -y purge printer-driver.* xserver-xorg-video.* xscreensaver.* wpasupplicant wireless-tools .*vdpau.* \ - bluez-cups cups-browsed cups-bsd cups-client cups-common cups-core-drivers cups-daemon cups-filters \ - cups-filters-core-drivers cups-ppdc cups-server-common linux-headers.* snapd bluez linux-firmware .*sane.* .*ppds.* && \ - apt-get -y clean && \ - apt-get -y --purge autoremove - -#Cleanup to ensure extra files aren't packed into VM -RUN cd ~ && \ - rm -rf /tmp/provision && \ - rm -f /var/cache/apt/archives/*.deb && \ - rm -rf /var/lib/apt/lists/* - - #### install RABIES ENV export LD_LIBRARY_PATH=/opt/minc/1.9.17/lib \ diff --git a/README.md b/README.md index f25d5ca..a2f410c 100644 --- a/README.md +++ b/README.md @@ -4,12 +4,16 @@ ## Command Line Interface ``` -usage: rabies [-h] [-e BOLD_ONLY] [-c COMMONSPACE_METHOD] [-b BIAS_REG_SCRIPT] - [-r COREG_SCRIPT] [-p PLUGIN] [-d DEBUG] [-v VERBOSE] +usage: rabies [-h] [--bids_input BIDS_INPUT] [-e BOLD_ONLY] + [-c COMMONSPACE_METHOD] [-b BIAS_REG_SCRIPT] [-r COREG_SCRIPT] + [-p PLUGIN] [--min_proc MIN_PROC] [-d DEBUG] [-v VERBOSE] + [--isotropic_resampling ISOTROPIC_RESAMPLING] + [--upsampling UPSAMPLING] [--data_type DATA_TYPE] [--cluster_type {local,sge,pbs,slurm}] [--walltime WALLTIME] [--memory_request MEMORY_REQUEST] - [--local_threads LOCAL_THREADS] [--STC STC] [--TR TR] - [--tpattern TPATTERN] [--anat_template ANAT_TEMPLATE] + [--local_threads LOCAL_THREADS] + [--template_reg_script TEMPLATE_REG_SCRIPT] [--STC STC] + [--TR TR] [--tpattern TPATTERN] [--anat_template ANAT_TEMPLATE] [--brain_mask BRAIN_MASK] [--WM_mask WM_MASK] [--CSF_mask CSF_MASK] [--labels LABELS] [--csv_labels CSV_LABELS] @@ -26,6 +30,11 @@ positional arguments: optional arguments: -h, --help show this help message and exit + --bids_input BIDS_INPUT + If the provided input data folder is in the BIDS + format to use the BIDS reader.Note that all .nii + inputs will be converted to compressed .gz format. + (default: False) -e BOLD_ONLY, --bold_only BOLD_ONLY preprocessing with only EPI scans. commonspace registration and distortion correction is executed @@ -42,9 +51,9 @@ optional arguments: (default: Rigid) -r COREG_SCRIPT, --coreg_script COREG_SCRIPT Specify EPI to anat coregistration script. Built-in - options include 'Rigid', 'Affine' and 'SyN' (non- - linear), but can specify a custom registration script - following the template script structure (see + options include 'Rigid', 'Affine', 'SyN' (non-linear) + and 'light_SyN', but can specify a custom registration + script following the template script structure (see RABIES/rabies/shell_scripts/ for template). 
(default: SyN) -p PLUGIN, --plugin PLUGIN @@ -52,12 +61,28 @@ optional arguments: Consult nipype plugin documentation for detailed options. Linear, MultiProc, SGE and SGEGraph have been tested. (default: Linear) + --min_proc MIN_PROC For parallel processing, specify the minimal number of + nodes to be assigned. (default: 1) -d DEBUG, --debug DEBUG Run in debug mode. Default=False (default: False) -v VERBOSE, --verbose VERBOSE Increase output verbosity. **doesn't do anything for now. (default: False) +Options for the resampling of the EPI for motion realignment, susceptibility distortion correction and common space resampling:: + --isotropic_resampling ISOTROPIC_RESAMPLING + Whether to resample the EPI to an isotropic resolution + based on the lowest dimension. (default: False) + --upsampling UPSAMPLING + Can specify a upsampling parameter to increase the EPI + resolution upon resampling and minimize information + lost from the transform interpolation. (default: 1.0) + --data_type DATA_TYPE + Specify resampling data format to control for file + size. Can specify a numpy data type from https://docs. + scipy.org/doc/numpy/user/basics.types.html. (default: + float64) + cluster options if commonspace method is ants_dbm (taken from twolevel_dbm.py):: --cluster_type {local,sge,pbs,slurm} Choose the type of cluster system to submit jobs to @@ -67,10 +92,16 @@ cluster options if commonspace method is ants_dbm (taken from twolevel_dbm.py):: --memory_request MEMORY_REQUEST Option for job submission specifying requested memory per pairwise registration. (default: 8gb) - --local_threads LOCAL_THREADS, -j LOCAL_THREADS + --local_threads LOCAL_THREADS For local execution, how many subject-wise modelbuilds to run in parallel, defaults to number of CPUs - (default: 8) + (default: 2) + --template_reg_script TEMPLATE_REG_SCRIPT + Registration script that will be used for registration + of the generated template to the provided atlas for + masking and labeling. Can choose a predefined + registration script among Rigid,Affine,SyN or + light_SyN, or provide a custom script. (default: SyN) Specify Slice Timing Correction info that is fed to AFNI 3dTshift.: --STC STC Whether to run STC or not. (default: True) @@ -82,46 +113,47 @@ Specify Slice Timing Correction info that is fed to AFNI 3dTshift.: Template files.: --anat_template ANAT_TEMPLATE Anatomical file for the commonspace template. - (default: /home/cic/desgab/RABIES/DSURQE_atlas/nifti/D - SURQE_100micron_average.nii.gz) + (default: /home/gabriel/RABIES/DSURQE_atlas/nifti/DSUR + QE_100micron_average.nii.gz) --brain_mask BRAIN_MASK - Brain mask for the template. (default: /home/cic/desga - b/RABIES/DSURQE_atlas/nifti/DSURQE_100micron_mask.nii. - gz) - --WM_mask WM_MASK White matter mask for the template. (default: /home/ci - c/desgab/RABIES/DSURQE_atlas/nifti/DSURQE_100micron_er - oded_WM_mask.nii.gz) - --CSF_mask CSF_MASK CSF mask for the template. (default: /home/cic/desgab/ - RABIES/DSURQE_atlas/nifti/DSURQE_100micron_eroded_CSF_ - mask.nii.gz) - --labels LABELS Atlas file with anatomical labels. (default: /home/cic - /desgab/RABIES/DSURQE_atlas/nifti/DSURQE_100micron_lab - els.nii.gz) + Brain mask for the template. (default: /home/gabriel/R + ABIES/DSURQE_atlas/nifti/DSURQE_100micron_mask.nii.gz) + --WM_mask WM_MASK White matter mask for the template. (default: /home/ga + briel/RABIES/DSURQE_atlas/nifti/DSURQE_100micron_erode + d_WM_mask.nii.gz) + --CSF_mask CSF_MASK CSF mask for the template. 
(default: /home/gabriel/RAB + IES/DSURQE_atlas/nifti/DSURQE_100micron_eroded_CSF_mas + k.nii.gz) + --labels LABELS Atlas file with anatomical labels. (default: /home/gab + riel/RABIES/DSURQE_atlas/nifti/DSURQE_100micron_labels + .nii.gz) --csv_labels CSV_LABELS - csv file with info on the labels. (default: /home/cic/ - desgab/RABIES/DSURQE_atlas/DSURQE_40micron_R_mapping.c - sv) + csv file with info on the labels. (default: /home/gabr + iel/RABIES/DSURQE_atlas/DSURQE_40micron_R_mapping.csv) ``` ## Example execution command ``` - rabies -e 0 -c ants_dbm -b default -r SyN -p SGEGraph -d 0 -v 0 nii_inputs/ rabies_outputs/ + rabies nii_inputs/ rabies_outputs/ -e 0 -c ants_dbm -b default -r SyN -p SGEGraph -d 0 -v 0 ``` -## Input data folder structure -* input_folder/subject_id/ses-#/bold/subject_id_ses-#_run-#_bold.nii.gz -* input_folder/subject_id/ses-#/anat/subject_id_ses-#_anat.nii.gz +# Input data folder structure +Input folder can follow either the BIDS structure (https://bids.neuroimaging.io/) or the following: +* input_folder/sub-subject_id/ses-#/func/sub-subject_id_ses-#_run-#_bold.nii.gz +* input_folder/sub-subject_id/ses-#/anat/sub-subject_id_ses-#_anat.nii.gz * input_folder/data_info.csv (with columns header: group,subject_id,num_session,num_run) - + - + + Directory Tree + -

Example Directory Tree

- nii_inputs/
- └── nii_inputs
-     ├── data_info.csv
-     ├── jgrAesAWc11L
-     │   ├── ses-1
-     │   │   ├── anat
-     │   │   │   └── jgrAesAWc11L_ses-1_anat.nii.gz
-     │   │   └── bold
-     │   │       ├── jgrAesAWc11L_ses-1_run-1_bold.nii.gz
-     │   │       ├── jgrAesAWc11L_ses-1_run-2_bold.nii.gz
-     │   │       └── jgrAesAWc11L_ses-1_run-3_bold.nii.gz
-     │   └── ses-2
-     │       ├── anat
-     │       │   └── jgrAesAWc11L_ses-2_anat.nii.gz
-     │       └── bold
-     │           ├── jgrAesAWc11L_ses-2_run-1_bold.nii.gz
-     │           ├── jgrAesAWc11L_ses-2_run-2_bold.nii.gz
-     │           └── jgrAesAWc11L_ses-2_run-3_bold.nii.gz
-     ├── jgrAesAWc11R
-     │   ├── ses-1
-     │   │   ├── anat
-     │   │   │   └── jgrAesAWc11R_ses-1_anat.nii.gz
-     │   │   └── bold
-     │   │       ├── jgrAesAWc11R_ses-1_run-1_bold.nii.gz
-     │   │       ├── jgrAesAWc11R_ses-1_run-2_bold.nii.gz
-     │   │       └── jgrAesAWc11R_ses-1_run-3_bold.nii.gz
-     │   └── ses-2
-     │       ├── anat
-     │       │   └── jgrAesAWc11R_ses-2_anat.nii.gz
-     │       └── bold
-     │           ├── jgrAesAWc11R_ses-2_run-1_bold.nii.gz
-     │           ├── jgrAesAWc11R_ses-2_run-2_bold.nii.gz
-     │           └── jgrAesAWc11R_ses-2_run-3_bold.nii.gz
-     └── jgrAesAWc11R1L
-         ├── ses-1
-         │   ├── anat
-         │   │   └── jgrAesAWc11R1L_ses-1_anat.nii.gz
-         │   └── bold
-         │       ├── jgrAesAWc11R1L_ses-1_run-1_bold.nii.gz
-         │       ├── jgrAesAWc11R1L_ses-1_run-2_bold.nii.gz
-         │       └── jgrAesAWc11R1L_ses-1_run-3_bold.nii.gz
-         └── ses-2
-             ├── anat
-             │   └── jgrAesAWc11R1L_ses-2_anat.nii.gz
-             └── bold
-                 ├── jgrAesAWc11R1L_ses-2_run-1_bold.nii.gz
-                 ├── jgrAesAWc11R1L_ses-2_run-2_bold.nii.gz
-                 └── jgrAesAWc11R1L_ses-2_run-3_bold.nii.gz
+

Example Directory Tree

+ nii_input
+ ├── data_info.csv
+ ├── sub-jgrAesAWc11L
+ │   ├── ses-1
+ │   │   ├── anat
+ │   │   │   └── sub-jgrAesAWc11L_ses-1_anat.nii.gz
+ │   │   └── func
+ │   │       ├── sub-jgrAesAWc11L_ses-1_run-1_bold.nii.gz
+ │   │       ├── sub-jgrAesAWc11L_ses-1_run-2_bold.nii.gz
+ │   │       └── sub-jgrAesAWc11L_ses-1_run-3_bold.nii.gz
+ │   └── ses-2
+ │       ├── anat
+ │       │   └── sub-jgrAesAWc11L_ses-2_anat.nii.gz
+ │       └── func
+ │           ├── sub-jgrAesAWc11L_ses-2_run-1_bold.nii.gz
+ │           ├── sub-jgrAesAWc11L_ses-2_run-2_bold.nii.gz
+ │           └── sub-jgrAesAWc11L_ses-2_run-3_bold.nii.gz
+ └── sub-jgrAesAWc11R
+     ├── ses-1
+     │   ├── anat
+     │   │   └── sub-jgrAesAWc11R_ses-1_anat.nii.gz
+     │   └── func
+     │       ├── sub-jgrAesAWc11R_ses-1_run-1_bold.nii.gz
+     │       ├── sub-jgrAesAWc11R_ses-1_run-2_bold.nii.gz
+     │       └── sub-jgrAesAWc11R_ses-1_run-3_bold.nii.gz
+     └── ses-2
+         ├── anat
+         │   └── sub-jgrAesAWc11R_ses-2_anat.nii.gz
+         └── func
+             ├── sub-jgrAesAWc11R_ses-2_run-1_bold.nii.gz
+             ├── sub-jgrAesAWc11R_ses-2_run-2_bold.nii.gz
+             └── sub-jgrAesAWc11R_ses-2_run-3_bold.nii.gz


-22 directories, 25 files
+14 directories, 17 files


- tree v1.6.0 © 1996 - 2011 by Steve Baker and Thomas Moore
+ tree v1.7.0 © 1996 - 2014 by Steve Baker and Thomas Moore
HTML output hacked and copyleft © 1998 by Francesc Rocher
+ JSON output hacked and copyleft © 2014 by Florian Sesser
Charsets / OS/2 support © 2001 by Kyosuke Tokoro

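With the BIDS reader, the same pipeline can be pointed at a BIDS-formatted folder. A hedged example invocation, assuming `--bids_input` follows the same 0/1 convention as the other boolean flags in the usage text above (only its default of `False` is documented), and using the `light_SyN` and `MultiProc` options listed there:

```
rabies bids_inputs/ rabies_outputs/ --bids_input 1 -e 0 -c ants_dbm -b default -r light_SyN -p MultiProc
```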
@@ -219,6 +237,16 @@ After installing the container from the Dockerfile, can run RABIES interactively ```
+# Managing outputs
+Important outputs will be found in the datasink folders:
+* anat_datasink: Includes outputs specific to the anatomical workflow
+* bold_datasink: Includes corrected EPI timeseries, EPI masks and key EPI outputs from the preprocessing workflow
+* commonspace_datasink: Outputs from the common space registration
+* transforms_datasink: Contains all transforms
+* confounds_datasink: Contains outputs to use for confound regression steps
+
+
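The datasink folders land directly under the output folder passed on the command line. A minimal sketch of where to look, using the sink names above and the confound outputs wired into confounds_datasink in this changeset (confounds_csv, FD_csv, FD_voxelwise, pos_voxelwise); nipype's DataSink may add further per-subject subfolders that are not shown here:

```
ls rabies_outputs/
# expected: anat_datasink/  bold_datasink/  commonspace_datasink/  confounds_datasink/  transforms_datasink/
ls rabies_outputs/confounds_datasink/
# expected: confounds_csv/  FD_csv/  FD_voxelwise/  pos_voxelwise/
```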
+ ## Publications * Gabriel Desrosiers-Gregoire, Gabriel A. Devenyi, Joanes Grandjean, M. Mallar Chakravarty. Recurrent functional connectivity gradients identified along specific frequency bands of oscillatory coherence and across anesthesia protocols for mouse fMRI. Presented at Society for Neuroscience 2019, Chicago, IL * Gabriel Desrosiers-Gregoire, Gabriel A. Devenyi, Joanes Grandjean, M. Mallar Chakravarty. (2019) Dynamic functional connectivity properties are differentially affected by anesthesia protocols and compare across species. Presented at Organization for Human Brain Mapping 2019, Rome, Italy @@ -227,7 +255,3 @@ After installing the container from the Dockerfile, can run RABIES interactively ## References * fmriprep - https://github.com/poldracklab/fmriprep -* minc-toolkit v2 - https://github.com/BIC-MNI/minc-toolkit-v2 -* minc-stuffs - https://github.com/Mouse-Imaging-Centre/minc-stuffs -* minc2-simple - https://github.com/vfonov/minc2-simple -* pydpiper - https://github.com/Mouse-Imaging-Centre/pydpiper diff --git a/dependencies.txt b/dependencies.txt index bd1684f..d99a62a 100644 --- a/dependencies.txt +++ b/dependencies.txt @@ -24,6 +24,7 @@ nilearn>=0.4.2 nipype>=1.1.4 tqdm pathos +pybids # Optional MINC dependencies minc-toolkit>=1.9.17 diff --git a/rabies/_info.py b/rabies/_info.py new file mode 100644 index 0000000..a8e2be3 --- /dev/null +++ b/rabies/_info.py @@ -0,0 +1,7 @@ +__version__ = "0.1.0" +__packagename__ = 'RABIES' +__url__ = 'https://github.com/CoBrALab/' + +DOWNLOAD_URL = ( + 'https://github.com/CoBrALab/{name}/archive/{ver}.tar.gz'.format( + name=__packagename__, ver=__version__)) diff --git a/rabies/main_wf.py b/rabies/main_wf.py index dfc1e3f..6a41a3a 100644 --- a/rabies/main_wf.py +++ b/rabies/main_wf.py @@ -6,11 +6,12 @@ from .preprocess_anat_pkg.anat_mask_prep import init_anat_mask_prep_wf from .preprocess_bold_pkg.bold_main_wf import init_bold_main_wf, commonspace_reg_function from .preprocess_bold_pkg.registration import run_antsRegistration +from .preprocess_bold_pkg.utils import BIDSDataGraber, prep_bids_iter from nipype.interfaces.io import SelectFiles, DataSink from nipype.interfaces.utility import Function -def init_unified_main_wf(data_dir_path, data_csv, output_folder, tr, tpattern, apply_STC=True, commonspace_method='pydpiper', template_reg_script=None, +def init_unified_main_wf(data_dir_path, data_csv, output_folder, bids_input=False, tr='1.0s', tpattern='altplus', apply_STC=True, commonspace_method='pydpiper', template_reg_script=None, bias_reg_script='Rigid', coreg_script='SyN', isotropic_resampling=False, upsampling=1.0, resampling_data_type='float64', name='main_wf'): ''' This workflow includes complete anatomical and BOLD preprocessing within a single workflow. 
@@ -23,6 +24,8 @@ def init_unified_main_wf(data_dir_path, data_csv, output_folder, tr, tpattern, a csv file specifying subject id and number of sessions and runs output_folder path to output folder for the workflow and datasink + bids_input + specify if the provided input folder is in a BIDS format to use BIDS reader tr repetition time for the EPI tpattern @@ -65,13 +68,13 @@ def init_unified_main_wf(data_dir_path, data_csv, output_folder, tr, tpattern, a Composite transforms from the EPI space to the anatomical space itk_anat_to_bold Composite transforms from the anatomical space to the EPI space - boldref_warped2anat + bias_cor_bold_warped2anat Bias field corrected 3D EPI volume warped to the anatomical space native_corrected_bold Original BOLD timeseries resampled through motion realignment and susceptibility distortion correction based on registration to the anatomical image - corrected_ref_bold + corrected_bold_ref 3D median EPI volume from the resampled native BOLD timeseries confounds_csv .csv file with measured confound timecourses, including global signal, @@ -113,27 +116,46 @@ def init_unified_main_wf(data_dir_path, data_csv, output_folder, tr, tpattern, a #set output node outputnode = pe.Node(niu.IdentityInterface( - fields=['anat_preproc','anat_mask', 'anat_labels', 'WM_mask', 'CSF_mask','initial_bold_ref', 'bias_cor_bold', 'itk_bold_to_anat','itk_anat_to_bold', 'boldref_warped2anat', 'native_corrected_bold', 'corrected_ref_bold', 'confounds_csv','FD_voxelwise', 'pos_voxelwise', 'FD_csv', + fields=['anat_preproc','anat_mask', 'anat_labels', 'WM_mask', 'CSF_mask','initial_bold_ref', 'bias_cor_bold', 'affine_bold2anat', 'warp_bold2anat', 'inverse_warp_bold2anat', 'bias_cor_bold_warped2anat', 'native_corrected_bold', 'corrected_bold_ref', 'confounds_csv','FD_voxelwise', 'pos_voxelwise', 'FD_csv', 'bold_brain_mask', 'bold_WM_mask', 'bold_CSF_mask', 'bold_labels', 'commonspace_bold', 'commonspace_mask', 'commonspace_WM_mask', 'commonspace_CSF_mask', 'commonspace_labels']), name='outputnode') - - #read the data_info csv - import pandas as pd - data_df=pd.read_csv(data_csv, sep=',') - subject_list=data_df['subject_id'].values.tolist() - session_list=data_df['num_session'].values.tolist() - run_list=data_df['num_run'].values.tolist() - - #create a dictionary with list of bold session numbers for each subject - session_iter={} - for i in range(len(subject_list)): - session_iter[subject_list[i]] = list(range(1,int(session_list[i])+1)) - - #create a dictionary with list of bold run numbers for each subject - run_iter={} - for i in range(len(subject_list)): - run_iter[subject_list[i]] = list(range(1,int(run_list[i])+1)) + if bids_input: + #with BIDS input data + from bids.layout import BIDSLayout + layout = BIDSLayout(data_dir_path) + subject_list, session_iter, run_iter=prep_bids_iter(layout) + #set SelectFiles nodes + anat_selectfiles = pe.Node(BIDSDataGraber(bids_dir=data_dir_path, datatype='anat'), name='anat_selectfiles') + bold_selectfiles = pe.Node(BIDSDataGraber(bids_dir=data_dir_path, datatype='func'), name='bold_selectfiles') + else: + #with restricted data input structure for RABIES + import pandas as pd + data_df=pd.read_csv(data_csv, sep=',') + subject_list=data_df['subject_id'].values.tolist() + session_list=data_df['num_session'].values.tolist() + run_list=data_df['num_run'].values.tolist() + + #create a dictionary with list of bold session numbers for each subject + session_iter={} + for i in range(len(subject_list)): + session_iter[subject_list[i]] = 
list(range(1,int(session_list[i])+1)) + + #create a dictionary with list of bold run numbers for each subject + run_iter={} + for i in range(len(subject_list)): + run_iter[subject_list[i]] = list(range(1,int(run_list[i])+1)) + + #set SelectFiles nodes + anat_file = opj('sub-{subject_id}', 'ses-{session}', 'anat', 'sub-{subject_id}_ses-{session}_anat.nii.gz') + anat_selectfiles = pe.Node(SelectFiles({'out_file': anat_file}, + base_directory=data_dir_path), + name="anat_selectfiles") + + bold_file = opj('sub-{subject_id}', 'ses-{session}', 'func', 'sub-{subject_id}_ses-{session}_run-{run}_bold.nii.gz') + bold_selectfiles = pe.Node(SelectFiles({'out_file': bold_file}, + base_directory=data_dir_path), + name="bold_selectfiles") ####setting up all iterables infosub_id = pe.Node(niu.IdentityInterface(fields=['subject_id']), @@ -151,20 +173,25 @@ def init_unified_main_wf(data_dir_path, data_csv, output_folder, tr, tpattern, a inforun.iterables = [('run', run_iter)] # Datasink - creates output folder for important outputs - datasink = pe.Node(DataSink(base_directory=output_folder, - container="datasink"), - name="datasink") + anat_datasink = pe.Node(DataSink(base_directory=output_folder, + container="anat_datasink"), + name="anat_datasink") + + bold_datasink = pe.Node(DataSink(base_directory=output_folder, + container="bold_datasink"), + name="bold_datasink") + + commonspace_datasink = pe.Node(DataSink(base_directory=output_folder, + container="commonspace_datasink"), + name="commonspace_datasink") - #set SelectFiles nodes - anat_file = opj('{subject_id}', 'ses-{session}', 'anat', '{subject_id}_ses-{session}_anat.nii.gz') - anat_selectfiles = pe.Node(SelectFiles({'anat': anat_file}, - base_directory=data_dir_path), - name="anat_selectfiles") + transforms_datasink = pe.Node(DataSink(base_directory=output_folder, + container="transforms_datasink"), + name="transforms_datasink") - bold_file = opj('{subject_id}', 'ses-{session}', 'bold', '{subject_id}_ses-{session}_run-{run}_bold.nii.gz') - bold_selectfiles = pe.Node(SelectFiles({'bold': bold_file}, - base_directory=data_dir_path), - name="bold_selectfiles") + confounds_datasink = pe.Node(DataSink(base_directory=output_folder, + container="confounds_datasink"), + name="confounds_datasink") #####setting up commonspace registration within the workflow joinnode_session = pe.JoinNode(niu.IdentityInterface(fields=['file_list']), @@ -217,7 +244,7 @@ def init_unified_main_wf(data_dir_path, data_csv, output_folder, tr, tpattern, a output_names=['ants_dbm_template'], function=commonspace_reg_function), name='commonspace_reg') - commonspace_reg.inputs.output_folder = output_folder+'/datasink/' + commonspace_reg.inputs.output_folder = output_folder+'/commonspace_datasink/' #execute the registration of the generate anatomical template with the provided atlas for labeling and masking template_reg = pe.Node(Function(input_names=['reg_script', 'moving_image', 'fixed_image', 'anat_mask'], @@ -229,9 +256,9 @@ def init_unified_main_wf(data_dir_path, data_csv, output_folder, tr, tpattern, a template_reg.inputs.reg_script = template_reg_script #setting SelectFiles for the commonspace registration - anat_to_template_inverse_warp = output_folder+'/'+opj('datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_{subject_id}_ses-{session}_anat_preproc*1InverseWarp.nii.gz') - anat_to_template_warp = output_folder+'/'+opj('datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_{subject_id}_ses-{session}_anat_preproc*1Warp.nii.gz') - 
anat_to_template_affine = output_folder+'/'+opj('datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_{subject_id}_ses-{session}_anat_preproc*0GenericAffine.mat') + anat_to_template_inverse_warp = output_folder+'/'+opj('commonspace_datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_sub-{subject_id}_ses-{session}_*_preproc*1InverseWarp.nii.gz') + anat_to_template_warp = output_folder+'/'+opj('commonspace_datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_sub-{subject_id}_ses-{session}_*_preproc*1Warp.nii.gz') + anat_to_template_affine = output_folder+'/'+opj('commonspace_datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_sub-{subject_id}_ses-{session}_*_preproc*0GenericAffine.mat') common_to_template_transform = '/'+opj('{common_to_template_transform}') template_to_common_transform = '/'+opj('{template_to_common_transform}') @@ -292,18 +319,10 @@ def commonspace_transforms(template_to_common_transform,anat_to_template_warp, a (commonspace_reg, commonspace_selectfiles, [ ("ants_dbm_template", "ants_dbm_template"), ]), - (commonspace_reg, datasink, [ - ("ants_dbm_template", "ants_dbm_template"), - ]), (template_reg, commonspace_selectfiles, [ ("inverse_composite_transform", "common_to_template_transform"), ("composite_transform", "template_to_common_transform"), ]), - (template_reg, datasink, [ - ("inverse_composite_transform", "common_to_template_transform"), - ("composite_transform", "template_to_common_transform"), - ("warped_image", "warped_template"), - ]), (commonspace_selectfiles, transform_masks, [ ("common_to_template_transform", "common_to_template_transform"), ("anat_to_template_affine", "anat_to_template_affine"), @@ -327,18 +346,10 @@ def commonspace_transforms(template_to_common_transform,anat_to_template_warp, a ("WM_mask", "WM_mask"), ("CSF_mask", "CSF_mask"), ]), - (commonspace_selectfiles, commonspace_transforms_prep, [ - ("template_to_common_transform", "template_to_common_transform"), - ("anat_to_template_affine", "anat_to_template_affine"), - ("anat_to_template_warp", "anat_to_template_warp"), - ]), - (commonspace_selectfiles, datasink, [ - ("anat_to_template_affine", "anat_to_template_affine"), - ("anat_to_template_warp", "anat_to_template_warp"), - ]), - (commonspace_transforms_prep, bold_main_wf, [ - ("transforms_list", "inputnode.commonspace_transforms_list"), - ("inverses", "inputnode.commonspace_inverses"), + (commonspace_selectfiles, bold_main_wf, [ + ("template_to_common_transform", "inputnode.template_to_common_transform"), + ("anat_to_template_affine", "inputnode.anat_to_template_affine"), + ("anat_to_template_warp", "inputnode.anat_to_template_warp"), ]), (bold_main_wf, outputnode, [ ("outputnode.commonspace_bold", "commonspace_bold"), @@ -347,11 +358,27 @@ def commonspace_transforms(template_to_common_transform,anat_to_template_warp, a ("outputnode.commonspace_CSF_mask", "commonspace_CSF_mask"), ("outputnode.commonspace_labels", "commonspace_labels"), ]), - (outputnode, datasink, [ + (commonspace_reg, commonspace_datasink, [ + ("ants_dbm_template", "ants_dbm_template"), + ]), + (template_reg, commonspace_datasink, [ + ("warped_image", "warped_template"), + ]), + (template_reg, transforms_datasink, [ + ("inverse_composite_transform", "common_to_template_transform"), + ("composite_transform", "template_to_common_transform"), + ]), + (commonspace_selectfiles, transforms_datasink, [ + ("anat_to_template_affine", "anat_to_template_affine"), + ("anat_to_template_warp", 
"anat_to_template_warp"), + ]), + (outputnode, anat_datasink, [ ("anat_labels", 'anat_labels'), ("anat_mask", 'anat_mask'), ("WM_mask", "WM_mask"), ("CSF_mask", "CSF_mask"), + ]), + (outputnode, bold_datasink, [ ("commonspace_bold", "commonspace_bold"), #resampled EPI after motion realignment and SDC ("commonspace_mask", "commonspace_bold_mask"), ("commonspace_WM_mask", "commonspace_bold_WM_mask"), @@ -362,13 +389,13 @@ def commonspace_transforms(template_to_common_transform,anat_to_template_warp, a # MAIN WORKFLOW STRUCTURE ####################################################### workflow.connect([ - (anat_selectfiles, anat_preproc_wf, [("anat", "inputnode.anat_file")]), - (anat_preproc_wf, datasink, [("outputnode.preproc_anat", "anat_preproc")]), - (bold_selectfiles, datasink, [ - ("bold", "input_bold"), + (anat_selectfiles, anat_preproc_wf, [("out_file", "inputnode.anat_file")]), + (anat_preproc_wf, anat_datasink, [("outputnode.preproc_anat", "anat_preproc")]), + (bold_selectfiles, bold_datasink, [ + ("out_file", "input_bold"), ]), (bold_selectfiles, bold_main_wf, [ - ("bold", "inputnode.bold"), + ("out_file", "inputnode.bold"), ]), (bold_main_wf, outputnode, [ ("outputnode.bold_ref", "initial_bold_ref"), @@ -381,28 +408,34 @@ def commonspace_transforms(template_to_common_transform,anat_to_template_warp, a ("outputnode.FD_voxelwise", "FD_voxelwise"), ("outputnode.pos_voxelwise", "pos_voxelwise"), ("outputnode.FD_csv", "FD_csv"), - ("outputnode.itk_bold_to_anat", "itk_bold_to_anat"), - ("outputnode.itk_anat_to_bold", "itk_anat_to_bold"), - ("outputnode.output_warped_bold", "boldref_warped2anat"), + ('outputnode.affine_bold2anat', 'affine_bold2anat'), + ('outputnode.warp_bold2anat', 'warp_bold2anat'), + ('outputnode.inverse_warp_bold2anat', 'inverse_warp_bold2anat'), + ("outputnode.output_warped_bold", "bias_cor_bold_warped2anat"), ("outputnode.resampled_bold", "native_corrected_bold"), - ("outputnode.resampled_ref_bold", "corrected_ref_bold"), + ("outputnode.resampled_ref_bold", "corrected_bold_ref"), + ]), + (outputnode, transforms_datasink, [ + ('affine_bold2anat', 'affine_bold2anat'), + ('warp_bold2anat', 'warp_bold2anat'), + ('inverse_warp_bold2anat', 'inverse_warp_bold2anat'), ]), - (outputnode, datasink, [ + (outputnode, confounds_datasink, [ + ("confounds_csv", "confounds_csv"), #confounds file + ("FD_voxelwise", "FD_voxelwise"), + ("pos_voxelwise", "pos_voxelwise"), + ("FD_csv", "FD_csv"), + ]), + (outputnode, bold_datasink, [ ("initial_bold_ref","initial_bold_ref"), #inspect initial bold ref ("bias_cor_bold","bias_cor_bold"), #inspect bias correction ("bold_brain_mask","bold_brain_mask"), #get the EPI labels ("bold_WM_mask","bold_WM_mask"), #get the EPI labels ("bold_CSF_mask","bold_CSF_mask"), #get the EPI labels ("bold_labels","bold_labels"), #get the EPI labels - ("confounds_csv", "confounds_csv"), #confounds file - ("FD_voxelwise", "FD_voxelwise"), - ("pos_voxelwise", "pos_voxelwise"), - ("FD_csv", "FD_csv"), - ("itk_bold_to_anat", "itk_bold_to_anat"), - ("itk_anat_to_bold", "itk_anat_to_bold"), - ("boldref_warped2anat","boldref_warped2anat"), #warped EPI to anat + ("bias_cor_bold_warped2anat","bias_cor_bold_warped2anat"), #warped EPI to anat ("native_corrected_bold", "native_corrected_bold"), #resampled EPI after motion realignment and SDC - ("corrected_ref_bold", "corrected_ref_bold"), #resampled EPI after motion realignment and SDC + ("corrected_bold_ref", "corrected_bold_ref"), #resampled EPI after motion realignment and SDC ]), ]) diff --git 
a/rabies/preprocess_bold_pkg/bias_correction.py b/rabies/preprocess_bold_pkg/bias_correction.py index 9dbb395..f5f5c1a 100644 --- a/rabies/preprocess_bold_pkg/bias_correction.py +++ b/rabies/preprocess_bold_pkg/bias_correction.py @@ -64,6 +64,9 @@ class EPIBiasCorrection(BaseInterface): def _run_interface(self, runtime): import os + import numpy as np + import nibabel as nb + from nibabel import processing subject_id=os.path.basename(self.inputs.input_ref_EPI).split('_ses-')[0] session=os.path.basename(self.inputs.input_ref_EPI).split('_ses-')[1][0] @@ -99,12 +102,22 @@ def _run_interface(self, runtime): raise ValueError(msg) cwd=os.getcwd() - os.system('bash %s %s %s %s %s %s' % (bias_cor_script_path,self.inputs.input_ref_EPI, self.inputs.anat, self.inputs.anat_mask, filename_template, reg_script_path)) - warped_image='%s/%s_output_warped_image.nii.gz' % (cwd, filename_template) resampled_mask='%s/%s_resampled_mask.nii.gz' % (cwd, filename_template) biascor_EPI='%s/%s_bias_cor.nii.gz' % (cwd, filename_template) + #resample to isotropic resolution based on lowest dimension + dim=nb.load(self.inputs.input_ref_EPI).header.get_zooms() + low_dim=np.asarray(dim).min() + processing.resample_to_output(nb.load(self.inputs.input_ref_EPI), voxel_sizes=(low_dim,low_dim,low_dim), order=4).to_filename(cwd+'/resampled.nii.gz') + + os.system('bash %s %s %s %s %s %s' % (bias_cor_script_path,cwd+'/resampled.nii.gz', self.inputs.anat, self.inputs.anat_mask, filename_template, reg_script_path)) + + #resample to anatomical image resolution + dim=nb.load(self.inputs.anat).header.get_zooms() + low_dim=np.asarray(dim).min() + processing.resample_to_output(nb.load(cwd+'/iter_corrected.nii.gz'), voxel_sizes=(low_dim,low_dim,low_dim), order=4).to_filename(biascor_EPI) + setattr(self, 'corrected_EPI', biascor_EPI) setattr(self, 'warped_EPI', warped_image) setattr(self, 'resampled_mask', resampled_mask) diff --git a/rabies/preprocess_bold_pkg/bold_main_wf.py b/rabies/preprocess_bold_pkg/bold_main_wf.py index c6c2c0b..f028dc8 100644 --- a/rabies/preprocess_bold_pkg/bold_main_wf.py +++ b/rabies/preprocess_bold_pkg/bold_main_wf.py @@ -6,7 +6,7 @@ from nipype.interfaces.io import SelectFiles from .hmc import init_bold_hmc_wf -from .utils import init_bold_reference_wf +from .utils import init_bold_reference_wf, BIDSDataGraber, prep_bids_iter from .resampling import init_bold_preproc_trans_wf, init_bold_commonspace_trans_wf from .stc import init_bold_stc_wf from .sdc import init_sdc_wf @@ -130,11 +130,11 @@ def init_bold_main_wf(data_dir_path, tr='1.0s', tpattern='altplus', apply_STC=Tr workflow = pe.Workflow(name=name) - inputnode = pe.Node(niu.IdentityInterface(fields=['subject_id', 'bold', 'anat_preproc', 'anat_mask', 'WM_mask', 'CSF_mask', 'labels', 'commonspace_transforms_list', 'commonspace_inverses']), + inputnode = pe.Node(niu.IdentityInterface(fields=['subject_id', 'bold', 'anat_preproc', 'anat_mask', 'WM_mask', 'CSF_mask', 'labels', 'template_to_common_transform','anat_to_template_affine','anat_to_template_warp']), name="inputnode") outputnode = pe.Node(niu.IdentityInterface( - fields=['input_bold', 'bold_ref', 'skip_vols','motcorr_params', 'corrected_EPI', 'output_warped_bold', 'itk_bold_to_anat', 'itk_anat_to_bold','resampled_bold', 'resampled_ref_bold', 'EPI_brain_mask', 'EPI_WM_mask', 'EPI_CSF_mask', 'EPI_labels', + fields=['input_bold', 'bold_ref', 'skip_vols','motcorr_params', 'corrected_EPI', 'output_warped_bold', 'affine_bold2anat', 'warp_bold2anat', 'inverse_warp_bold2anat','resampled_bold', 
'resampled_ref_bold', 'EPI_brain_mask', 'EPI_WM_mask', 'EPI_CSF_mask', 'EPI_labels', 'confounds_csv', 'FD_voxelwise', 'pos_voxelwise', 'FD_csv', 'commonspace_bold', 'commonspace_mask', 'commonspace_WM_mask', 'commonspace_CSF_mask', 'commonspace_labels']), name='outputnode') @@ -152,9 +152,9 @@ def init_bold_main_wf(data_dir_path, tr='1.0s', tpattern='altplus', apply_STC=Tr bold_reg_wf = init_bold_reg_wf(coreg_script=coreg_script) - def SyN_coreg_transforms_prep(itk_bold_to_anat): - return [itk_bold_to_anat],[0] #transforms_list,inverses - transforms_prep = pe.Node(Function(input_names=['itk_bold_to_anat'], + def SyN_coreg_transforms_prep(warp_bold2anat,affine_bold2anat): + return [warp_bold2anat,affine_bold2anat],[0,0] #transforms_list,inverses + transforms_prep = pe.Node(Function(input_names=['warp_bold2anat','affine_bold2anat'], output_names=['transforms_list','inverses'], function=SyN_coreg_transforms_prep), name='transforms_prep') @@ -195,12 +195,14 @@ def SyN_coreg_transforms_prep(itk_bold_to_anat): (bias_cor_wf, outputnode, [ ('outputnode.corrected_EPI', 'corrected_EPI')]), (bold_reg_wf, outputnode, [ - ('outputnode.itk_bold_to_anat', 'itk_bold_to_anat'), - ('outputnode.itk_anat_to_bold', 'itk_anat_to_bold'), + ('outputnode.affine_bold2anat', 'affine_bold2anat'), + ('outputnode.warp_bold2anat', 'warp_bold2anat'), + ('outputnode.inverse_warp_bold2anat', 'inverse_warp_bold2anat'), ('outputnode.output_warped_bold', 'output_warped_bold'), ]), (bold_reg_wf, transforms_prep, [ - ('outputnode.itk_bold_to_anat', 'itk_bold_to_anat'), + ('outputnode.affine_bold2anat', 'affine_bold2anat'), + ('outputnode.warp_bold2anat', 'warp_bold2anat'), ]), (transforms_prep, bold_bold_trans_wf, [ ('transforms_list', 'inputnode.transforms_list'), @@ -245,16 +247,33 @@ def SyN_coreg_transforms_prep(itk_bold_to_anat): ]) if commonspace_transform: + def commonspace_transforms(template_to_common_transform,anat_to_template_warp, anat_to_template_affine, warp_bold2anat, affine_bold2anat): + return [template_to_common_transform,anat_to_template_warp, anat_to_template_affine, warp_bold2anat, affine_bold2anat],[0,0,0,0,0] #transforms_list,inverses + commonspace_transforms_prep = pe.Node(Function(input_names=['template_to_common_transform','anat_to_template_warp','anat_to_template_affine', 'warp_bold2anat', 'affine_bold2anat'], + output_names=['transforms_list','inverses'], + function=commonspace_transforms), + name='commonspace_transforms_prep') + bold_commonspace_trans_wf = init_bold_commonspace_trans_wf(isotropic_resampling=isotropic_resampling, upsampling=upsampling, data_type=resampling_data_type, name='bold_commonspace_trans_wf') workflow.connect([ + (inputnode, commonspace_transforms_prep, [ + ("template_to_common_transform", "template_to_common_transform"), + ("anat_to_template_affine", "anat_to_template_affine"), + ("anat_to_template_warp", "anat_to_template_warp"), + ]), + (bold_reg_wf, commonspace_transforms_prep, [ + ('outputnode.affine_bold2anat', 'affine_bold2anat'), + ('outputnode.warp_bold2anat', 'warp_bold2anat'), + ]), + (commonspace_transforms_prep, bold_commonspace_trans_wf, [ + ('transforms_list', 'inputnode.transforms_list'), + ('inverses', 'inputnode.inverses'), + ]), + (boldbuffer, bold_commonspace_trans_wf, [('bold_file', 'inputnode.bold_file')]), + (bold_hmc_wf, bold_commonspace_trans_wf, [('outputnode.motcorr_params', 'inputnode.motcorr_params')]), (inputnode, bold_commonspace_trans_wf, [ ('bold', 'inputnode.name_source'), - ('commonspace_transforms_list', 'inputnode.transforms_list'), - 
('commonspace_inverses', 'inputnode.inverses'), - ]), - (bold_bold_trans_wf, bold_commonspace_trans_wf, [ - ('outputnode.bold', 'inputnode.bold_file'), ]), (bold_commonspace_trans_wf, outputnode, [ ('outputnode.bold', 'commonspace_bold'), @@ -268,7 +287,7 @@ def SyN_coreg_transforms_prep(itk_bold_to_anat): return workflow -def init_EPIonly_bold_main_wf(data_dir_path, data_csv, output_folder, tr='1.0s', tpattern='altplus', apply_STC=True, bias_reg_script='Rigid', coreg_script='SyN', template_reg_script=None, +def init_EPIonly_bold_main_wf(data_dir_path, data_csv, output_folder, bids_input=False, tr='1.0s', tpattern='altplus', apply_STC=True, bias_reg_script='Rigid', coreg_script='SyN', template_reg_script=None, isotropic_resampling=False, upsampling=1.0, resampling_data_type='float64', aCompCor_method='50%', name='bold_main_wf'): """ This is an alternative workflow for EPI-only preprocessing, inluding commonspace @@ -284,6 +303,8 @@ def init_EPIonly_bold_main_wf(data_dir_path, data_csv, output_folder, tr='1.0s', csv file specifying subject id and number of sessions and runs output_folder path to output folder for the workflow and datasink + bids_input + specify if the provided input folder is in a BIDS format to use BIDS reader tr repetition time for the EPI tpattern @@ -380,21 +401,35 @@ def init_EPIonly_bold_main_wf(data_dir_path, data_csv, output_folder, tr='1.0s', 'resampled_bold', 'resampled_ref_bold', 'EPI_brain_mask', 'EPI_WM_mask', 'EPI_CSF_mask', 'EPI_labels', 'confounds_csv', 'FD_voxelwise', 'pos_voxelwise', 'FD_csv']), name='outputnode') - import pandas as pd - data_df=pd.read_csv(data_csv, sep=',') - subject_list=data_df['subject_id'].values.tolist() - session_list=data_df['num_session'].values.tolist() - run_list=data_df['num_run'].values.tolist() + if bids_input: + #with BIDS input data + from bids.layout import BIDSLayout + layout = BIDSLayout(data_dir_path) + subject_list, session_iter, run_iter=prep_bids_iter(layout) + #set SelectFiles nodes + bold_selectfiles = pe.Node(BIDSDataGraber(bids_dir=data_dir_path, datatype='func'), name='bold_selectfiles') + else: + import pandas as pd + data_df=pd.read_csv(data_csv, sep=',') + subject_list=data_df['subject_id'].values.tolist() + session_list=data_df['num_session'].values.tolist() + run_list=data_df['num_run'].values.tolist() + + #create a dictionary with list of bold session numbers for each subject + session_iter={} + for i in range(len(subject_list)): + session_iter[subject_list[i]] = list(range(1,int(session_list[i])+1)) - #create a dictionary with list of bold session numbers for each subject - session_iter={} - for i in range(len(subject_list)): - session_iter[subject_list[i]] = list(range(1,int(session_list[i])+1)) + #create a dictionary with list of bold run numbers for each subject + run_iter={} + for i in range(len(subject_list)): + run_iter[subject_list[i]] = list(range(1,int(run_list[i])+1)) + + bold_file = opj('sub-{subject_id}', 'ses-{session}', 'bold', 'sub-{subject_id}_ses-{session}_run-{run}_bold.nii.gz') + bold_selectfiles = pe.Node(SelectFiles({'out_file': bold_file}, + base_directory=data_dir_path), + name="bold_selectfiles") - #create a dictionary with list of bold run numbers for each subject - run_iter={} - for i in range(len(subject_list)): - run_iter[subject_list[i]] = list(range(1,int(run_list[i])+1)) ####setting up all iterables infosub_id = pe.Node(niu.IdentityInterface(fields=['subject_id']), @@ -411,16 +446,23 @@ def init_EPIonly_bold_main_wf(data_dir_path, data_csv, output_folder, tr='1.0s', 
inforun.itersource = ('infosub_id', 'subject_id') inforun.iterables = [('run', run_iter)] - bold_file = opj('{subject_id}', 'ses-{session}', 'bold', '{subject_id}_ses-{session}_run-{run}_bold.nii.gz') - bold_selectfiles = pe.Node(SelectFiles({'bold': bold_file}, - base_directory=data_dir_path), - name="bold_selectfiles") - # Datasink - creates output folder for important outputs - datasink = pe.Node(DataSink(base_directory=output_folder, - container="datasink"), - name="datasink") + bold_datasink = pe.Node(DataSink(base_directory=output_folder, + container="bold_datasink"), + name="bold_datasink") + + commonspace_datasink = pe.Node(DataSink(base_directory=output_folder, + container="commonspace_datasink"), + name="commonspace_datasink") + + transforms_datasink = pe.Node(DataSink(base_directory=output_folder, + container="transforms_datasink"), + name="transforms_datasink") + + confounds_datasink = pe.Node(DataSink(base_directory=output_folder, + container="confounds_datasink"), + name="confounds_datasink") bold_reference_wf = init_bold_reference_wf() bias_cor_wf = bias_correction_wf(bias_reg_script=bias_reg_script) @@ -480,7 +522,7 @@ def to_commonspace_transforms_prep(template_to_common_transform, ants_dbm_warp, output_names=['ants_dbm_template'], function=commonspace_reg_function), name='commonspace_reg') - commonspace_reg.inputs.output_folder = output_folder+'/datasink/' + commonspace_reg.inputs.output_folder = output_folder+'/commonspace_datasink/' #execute the registration of the generate anatomical template with the provided atlas for labeling and masking template_reg = pe.Node(Function(input_names=['reg_script', 'moving_image', 'fixed_image', 'anat_mask'], @@ -492,9 +534,9 @@ def to_commonspace_transforms_prep(template_to_common_transform, ants_dbm_warp, template_reg.inputs.reg_script = template_reg_script #setting SelectFiles for the commonspace registration - ants_dbm_inverse_warp = output_folder+'/'+opj('datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_{sub}_ses-{ses}_run-{run}_bias_cor*1InverseWarp.nii.gz') - ants_dbm_warp = output_folder+'/'+opj('datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_{sub}_ses-{ses}_run-{run}_bias_cor*1Warp.nii.gz') - ants_dbm_affine = output_folder+'/'+opj('datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_{sub}_ses-{ses}_run-{run}_bias_cor*0GenericAffine.mat') + ants_dbm_inverse_warp = output_folder+'/'+opj('commonspace_datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_{sub}_ses-{ses}_run-{run}_bias_cor*1InverseWarp.nii.gz') + ants_dbm_warp = output_folder+'/'+opj('commonspace_datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_{sub}_ses-{ses}_run-{run}_bias_cor*1Warp.nii.gz') + ants_dbm_affine = output_folder+'/'+opj('commonspace_datasink','ants_dbm_outputs','ants_dbm','output','secondlevel','secondlevel_{sub}_ses-{ses}_run-{run}_bias_cor*0GenericAffine.mat') common_to_template_transform = '/'+opj('{common_to_template_transform}') template_to_common_transform = '/'+opj('{template_to_common_transform}') @@ -545,9 +587,6 @@ def to_commonspace_transforms_prep(template_to_common_transform, ants_dbm_warp, (commonspace_reg, template_reg, [ ("ants_dbm_template", "moving_image"), ]), - (commonspace_reg, datasink, [ - ("ants_dbm_template", "ants_dbm_template"), - ]), (template_reg, commonspace_selectfiles, [ ("inverse_composite_transform", "common_to_template_transform"), ("composite_transform", "template_to_common_transform"), 
@@ -556,8 +595,8 @@ def to_commonspace_transforms_prep(template_to_common_transform, ants_dbm_warp, # MAIN WORKFLOW STRUCTURE ####################################################### workflow.connect([ - (bold_selectfiles, bold_reference_wf, [('bold', 'inputnode.bold_file')]), - (bold_selectfiles, bold_bold_trans_wf, [('bold', 'inputnode.name_source')]), + (bold_selectfiles, bold_reference_wf, [('out_file', 'inputnode.bold_file')]), + (bold_selectfiles, bold_bold_trans_wf, [('out_file', 'inputnode.name_source')]), (bold_reference_wf, bias_cor_wf, [ ('outputnode.ref_image', 'inputnode.ref_EPI') ]), @@ -628,21 +667,30 @@ def to_commonspace_transforms_prep(template_to_common_transform, ants_dbm_warp, ('ants_dbm_affine', 'ants_dbm_affine'), ('ants_dbm_template_anat', 'ants_dbm_template_anat'), ]), - (outputnode, datasink, [ + (commonspace_reg, commonspace_datasink, [ + ("ants_dbm_template", "ants_dbm_template"), + ]), + (outputnode, bold_datasink, [ ("bold_ref","initial_bold_ref"), #inspect initial bold ref ("corrected_EPI","bias_cor_bold"), #inspect bias correction ("EPI_brain_mask","bold_brain_mask"), #get the EPI labels ("EPI_WM_mask","bold_WM_mask"), #get the EPI labels ("EPI_CSF_mask","bold_CSF_mask"), #get the EPI labels ("EPI_labels","bold_labels"), #get the EPI labels + ("resampled_bold", "corrected_bold"), #resampled EPI after motion realignment and SDC + ("resampled_ref_bold", "corrected_bold_ref"), #resampled EPI after motion realignment and SDC + ]), + (outputnode, confounds_datasink, [ ("confounds_csv", "confounds_csv"), #confounds file ("FD_voxelwise", "FD_voxelwise"), ("pos_voxelwise", "pos_voxelwise"), ("FD_csv", "FD_csv"), - ("resampled_bold", "native_corrected_bold"), #resampled EPI after motion realignment and SDC - ("resampled_ref_bold", "corrected_ref_bold"), #resampled EPI after motion realignment and SDC + ]), + (outputnode, transforms_datasink, [ ("common_to_template_transform", "common_to_template_transform"), ("template_to_common_transform", "template_to_common_transform"), + ]), + (outputnode, commonspace_datasink, [ ("warped_template", "warped_template"), ]), ]) diff --git a/rabies/preprocess_bold_pkg/registration.py b/rabies/preprocess_bold_pkg/registration.py index cea1733..c829f93 100644 --- a/rabies/preprocess_bold_pkg/registration.py +++ b/rabies/preprocess_bold_pkg/registration.py @@ -49,14 +49,14 @@ def init_bold_reg_wf(coreg_script='SyN', name='bold_reg_wf'): outputnode = pe.Node( niu.IdentityInterface(fields=[ - 'itk_bold_to_anat', 'itk_anat_to_bold', 'output_warped_bold']), + 'affine_bold2anat', 'warp_bold2anat', 'inverse_warp_bold2anat', 'output_warped_bold']), name='outputnode' ) run_reg = pe.Node(Function(input_names=["reg_script", "moving_image", "fixed_image", "anat_mask"], - output_names=['itk_bold_to_anat', 'itk_anat_to_bold', 'output_warped_bold'], + output_names=['affine_bold2anat', 'warp_bold2anat', 'inverse_warp_bold2anat', 'output_warped_bold'], function=run_antsRegistration), name='EPI_Coregistration') run_reg.inputs.reg_script=coreg_script run_reg.plugin_args = {'qsub_args': '-pe smp 4', 'overwrite': True} @@ -67,8 +67,9 @@ def init_bold_reg_wf(coreg_script='SyN', name='bold_reg_wf'): ('anat_preproc', 'fixed_image'), ('anat_mask', 'anat_mask')]), (run_reg, outputnode, [ - ('itk_bold_to_anat', 'itk_bold_to_anat'), - ('itk_anat_to_bold', 'itk_anat_to_bold'), + ('affine_bold2anat', 'affine_bold2anat'), + ('warp_bold2anat', 'warp_bold2anat'), + ('inverse_warp_bold2anat', 'inverse_warp_bold2anat'), ('output_warped_bold', 'output_warped_bold'), ]), 
]) @@ -76,86 +77,26 @@ def init_bold_reg_wf(coreg_script='SyN', name='bold_reg_wf'): return workflow -def run_antsRegistration(reg_script='Affine', moving_image='NULL', fixed_image='NULL', anat_mask='NULL'): +def run_antsRegistration(reg_script, moving_image='NULL', fixed_image='NULL', anat_mask='NULL'): import os filename_template=os.path.basename(moving_image).split('.')[0] - if reg_script=='SyN': - import rabies - dir_path = os.path.dirname(os.path.realpath(rabies.__file__)) - reg_script_path=dir_path+'/shell_scripts/SyN_registration.sh' - ''' - EPI=$1 - anat_file=$2 - mask=$3 - filename_template=$4 - - antsRegistration -d 3 \ - --verbose -o [${filename_template}_output_,${filename_template}_output_warped_image.nii.gz] \ - -t Rigid[0.1] -m Mattes[$anat_file,$EPI,1,64,None] \ - -c 1000x500x250x100x50x25 -s 8x4x2x1x0.5x0 -f 6x5x4x3x2x1 --masks [NULL,NULL] \ - -t Similarity[0.1] -m Mattes[$anat_file,$EPI,1,64,None] \ - -c 100x50x25 -s 1x0.5x0 -f 3x2x1 --masks [$mask,NULL] \ - -t Affine[0.1] -m Mattes[$anat_file,$EPI,1,64,None] \ - -c 100x50x25 -s 1x0.5x0 -f 3x2x1 --masks [$mask,$mask] \ - -t SyN[ 0.2, 3.0, 0.0 ] -m Mattes[$anat_file,$EPI, 1, 64 ] \ - -c [ 40x20x0, 1e-06, 6 ] -s 2x1x0 -f 4x2x1 --masks [$mask,$mask] \ - --interpolation BSpline[5] -z 1 -u 0 -a 1 - ''' - - elif reg_script=='Affine': - import rabies - dir_path = os.path.dirname(os.path.realpath(rabies.__file__)) - reg_script_path=dir_path+'/shell_scripts/Affine_registration.sh' - ''' - EPI=$1 - anat_file=$2 - mask=$3 - filename_template=$4 - - antsRegistration -d 3 \ - --verbose -o [${filename_template}_output_,${filename_template}_output_warped_image.nii.gz] \ - -t Rigid[0.1] -m Mattes[$anat_file,$EPI,1,64,None] \ - -c 1000x500x250x100x50x25 -s 8x4x2x1x0.5x0 -f 6x5x4x3x2x1 --masks [NULL,NULL] \ - -t Similarity[0.1] -m Mattes[$anat_file,$EPI,1,64,None] \ - -c 100x50x25 -s 1x0.5x0 -f 3x2x1 --masks [$mask,NULL] \ - -t Affine[0.1] -m Mattes[$anat_file,$EPI,1,64,None] \ - -c 100x50x25 -s 1x0.5x0 -f 3x2x1 --masks [$mask,$mask] \ - --interpolation BSpline[5] -z 1 -u 0 -a 1 - ''' - - elif reg_script=='Rigid': - import rabies - dir_path = os.path.dirname(os.path.realpath(rabies.__file__)) - reg_script_path=dir_path+'/shell_scripts/Rigid_registration.sh' - ''' - EPI=$1 - anat_file=$2 - mask=$3 - filename_template=$4 - - antsRegistration -d 3 \ - --verbose -o [${filename_template}_output_,${filename_template}_output_warped_image.nii.gz] \ - -t Rigid[0.1] -m Mattes[$anat_file,$EPI,1,64,None] \ - -c 1000x500x250x100x50x25 -s 8x4x2x1x0.5x0 -f 6x5x4x3x2x1 --masks [NULL,NULL] \ - --interpolation BSpline[5] -z 1 -u 0 -a 1 - ''' - + if os.path.isfile(reg_script): + reg_script_path=reg_script else: - ''' - For user-provided antsRegistration command. 
- ''' - if os.path.isfile(reg_script): - reg_script_path=reg_script - else: - raise ValueError('REGISTRATION ERROR: THE REG SCRIPT FILE DOES NOT EXISTS') + raise ValueError('REGISTRATION ERROR: THE REG SCRIPT FILE DOES NOT EXISTS') os.system('bash %s %s %s %s %s' % (reg_script_path,moving_image, fixed_image, anat_mask, filename_template)) cwd=os.getcwd() warped_image='%s/%s_output_warped_image.nii.gz' % (cwd, filename_template) - inverse_composite_transform='%s/%s_output_InverseComposite.h5' % (cwd, filename_template) - composite_transform='%s/%s_output_Composite.h5' % (cwd, filename_template) - if not os.path.isfile(warped_image) or not os.path.isfile(inverse_composite_transform) or not os.path.isfile(composite_transform): + affine='%s/%s_output_0GenericAffine.mat' % (cwd, filename_template) + warp='%s/%s_output_1Warp.nii.gz' % (cwd, filename_template) + inverse_warp='%s/%s_output_1InverseWarp.nii.gz' % (cwd, filename_template) + if not os.path.isfile(warped_image) or not os.path.isfile(affine): raise ValueError('REGISTRATION ERROR: OUTPUT FILES MISSING.') + if not os.path.isfile(warp) or not os.path.isfile(inverse_warp): + print('No non-linear warp files as output. Assumes linear registration.') + warp='NULL' + inverse_warp='NULL' - return [composite_transform, inverse_composite_transform, warped_image] + return [affine, warp, inverse_warp, warped_image] diff --git a/rabies/preprocess_bold_pkg/resampling.py b/rabies/preprocess_bold_pkg/resampling.py index 0ff3366..8aedb47 100644 --- a/rabies/preprocess_bold_pkg/resampling.py +++ b/rabies/preprocess_bold_pkg/resampling.py @@ -58,7 +58,7 @@ def init_bold_commonspace_trans_wf(isotropic_resampling, upsampling, data_type=' workflow = pe.Workflow(name=name) inputnode = pe.Node(niu.IdentityInterface(fields=[ - 'name_source', 'bold_file', 'transforms_list', 'inverses']), + 'name_source', 'bold_file', 'motcorr_params', 'transforms_list', 'inverses']), name='inputnode' ) @@ -67,8 +67,8 @@ def init_bold_commonspace_trans_wf(isotropic_resampling, upsampling, data_type=' name='outputnode') bold_transform = pe.Node(slice_applyTransforms(), name='bold_transform') + bold_transform.inputs.apply_motcorr = True bold_transform.inputs.ref_file = os.environ["template_anat"] - bold_transform.inputs.apply_motcorr = False bold_transform.inputs.isotropic_resampling = isotropic_resampling bold_transform.inputs.upsampling = upsampling bold_transform.inputs.data_type = data_type @@ -101,6 +101,7 @@ def init_bold_commonspace_trans_wf(isotropic_resampling, upsampling, data_type=' (inputnode, merge, [('name_source', 'header_source')]), (inputnode, bold_transform, [ ('bold_file', 'in_file'), + ('motcorr_params', 'motcorr_params'), ('transforms_list', 'transforms'), ('inverses', 'inverses'), ]), diff --git a/rabies/preprocess_bold_pkg/sdc.py b/rabies/preprocess_bold_pkg/sdc.py deleted file mode 100644 index f9861c2..0000000 --- a/rabies/preprocess_bold_pkg/sdc.py +++ /dev/null @@ -1,79 +0,0 @@ -import os -from nipype.pipeline import engine as pe -from nipype.interfaces import utility as niu -from nipype import Function -from .utils import init_bold_reference_wf, Merge - -def init_sdc_wf(tool, name='sdc_wf'): - - workflow = pe.Workflow(name=name) - inputnode = pe.Node(niu.IdentityInterface(fields=['ref_bold', 'reversed_bold_file', 'name_source']), - name='inputnode') - outputnode = pe.Node( - niu.IdentityInterface(fields=['fieldwarp', 'affine_trans', 'nlin_trans', 'avg_corrected_image', 'ref_reversed_bold_file']), - name='outputnode') - - bold_reference_wf = 
init_bold_reference_wf(enhance_contrast=True) - workflow.connect([ - (inputnode, bold_reference_wf, [('reversed_bold_file', 'inputnode.bold_file')]), - (bold_reference_wf, outputnode, [('outputnode.enhanced_ref_image', 'ref_reversed_bold_file')]), - ]) - - if tool=='FSL': - mk_list = pe.Node(Function(input_names=["first_img", "second_img"], - output_names=["list"], - function=makeList), name='mk_list') - - from .utils import Merge - merge = pe.Node(Merge(), name='merge') - - from nipype.interfaces.fsl import TOPUP - sdc_topup = pe.Node(TOPUP(config=os.path.abspath("/topup/empty.cnf"), encoding_file = os.path.abspath("/topup/topup_encoding.txt"), - output_type = "NIFTI_GZ", warp_res = 1, fwhm=0.3, max_iter=10, ssqlambda=1, regmod='bending_energy', estmov=1, minmet=0, - splineorder=3, numprec='double', interp='spline', scale=1 - #topup.inputs.subsamp = 2, topup.inputs.reg_lambda = 0.005 - ), name='SDC_TOPUP') - - workflow.connect([ - (inputnode, mk_list, [('ref_bold', 'first_img')]), - (bold_reference_wf, mk_list, [('outputnode.ref_image', 'second_img')]), - (mk_list, merge, [('list', 'in_files')]), - (inputnode, merge, [('name_source', 'header_source')]), - (merge, sdc_topup, [('out_file', 'in_file')]), - (sdc_topup, outputnode, [ - ('out_field', 'fieldwarp'), - ('out_corrected', 'avg_corrected_image')]), - ]) - - elif tool=='ANTs': - from .utils import antsGenerateTemplate - sdc_antsGenTemplate = pe.Node(antsGenerateTemplate(), name='sdc_antsGenTemplate') - workflow.connect([ - (inputnode, sdc_antsGenTemplate, [('ref_bold', 'EPI')]), - (bold_reference_wf, sdc_antsGenTemplate, [('outputnode.ref_image', 'reversed_EPI')]), - (sdc_antsGenTemplate, outputnode, [ - ('affine_trans', 'affine_trans'), - ('nlin_trans', 'nlin_trans')]), - ]) - - - return workflow - - -def writeCSV(first_img, second_img): - #create a CSV file as input for antsGenerateTemplate - import csv - import os - with open('EPI.csv', 'w', newline='') as csvfile: - csvwriter = csv.writer(csvfile, delimiter=' ', - quotechar='|', quoting=csv.QUOTE_MINIMAL) - csvwriter.writerow([first_img]) - csvwriter.writerow([second_img]) - inputs = os.path.abspath('EPI.csv') - - return inputs - -def makeList(first_img, second_img): - #merge two files into a list - file_list = [first_img, second_img] - return file_list diff --git a/rabies/preprocess_bold_pkg/utils.py b/rabies/preprocess_bold_pkg/utils.py index af20725..a14f2b4 100644 --- a/rabies/preprocess_bold_pkg/utils.py +++ b/rabies/preprocess_bold_pkg/utils.py @@ -7,6 +7,74 @@ ) from nipype.interfaces.base import CommandLine, CommandLineInputSpec +def prep_bids_iter(layout): + subject_list=layout.get_subject() + #create a dictionary with list of bold session and run numbers for each subject + session_iter={} + run_iter={} + for sub in subject_list: + sub_func=layout.get(datatype='func', extension=['nii', 'nii.gz']) + session=0 + run=0 + for func_bids in sub_func: + if int(func_bids.get_entities()['session'])>session: + session=int(func_bids.get_entities()['session']) + if int(func_bids.get_entities()['run'])>run: + run=int(func_bids.get_entities()['run']) + session_iter[sub] = list(range(1,int(session)+1)) + run_iter[sub] = list(range(1,int(run)+1)) + return subject_list, session_iter, run_iter + +class BIDSDataGraberInputSpec(BaseInterfaceInputSpec): + bids_dir = traits.Str(exists=True, mandatory=True, desc="BIDS data directory") + datatype = traits.Str(exists=True, mandatory=True, desc="datatype of the target file") + subject_id = traits.Str(exists=True, mandatory=True, desc="Subject ID") 
+ session = traits.Int(exists=True, mandatory=True, desc="Session number") + run = traits.Int(exists=True, default=None, desc="Run number") + +class BIDSDataGraberOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="Selected file based on the provided parameters.") + +class BIDSDataGraber(BaseInterface): + """ + """ + + input_spec = BIDSDataGraberInputSpec + output_spec = BIDSDataGraberOutputSpec + + def _run_interface(self, runtime): + import os + from bids.layout import BIDSLayout + layout = BIDSLayout(self.inputs.bids_dir) + try: + if self.inputs.datatype=='func': + bids_file=layout.get(subject=self.inputs.subject_id, session=self.inputs.session, run=self.inputs.run, extension=['nii', 'nii.gz'], datatype=self.inputs.datatype) + func=layout.get(subject=self.inputs.subject_id, session=self.inputs.session, run=self.inputs.run, extension=['nii', 'nii.gz'], datatype=self.inputs.datatype, return_type='filename') + file=func[0] + elif self.inputs.datatype=='anat': + bids_file=layout.get(subject=self.inputs.subject_id, session=self.inputs.session, extension=['nii', 'nii.gz'], datatype=self.inputs.datatype) + anat=layout.get(subject=self.inputs.subject_id, session=self.inputs.session, extension=['nii', 'nii.gz'], datatype=self.inputs.datatype, return_type='filename') + file=anat[0] + else: + raise ValueError('Wrong datatype %s' % (self.inputs.datatype)) + if len(bids_file)>1: + raise ValueError('Provided BIDS spec lead to duplicates: %s' % (str(self.inputs.datatype+'_'+self.inputs.subject_id+'_'+self.inputs.session+'_'+self.inputs.run))) + except: + raise ValueError('Error with BIDS spec: %s' % (str(self.inputs.datatype+'_'+self.inputs.subject_id+'_'+self.inputs.session+'_'+self.inputs.run))) + + nii_format=bids_file[0].get_entities()['extension'] + #RABIES only work with compressed .nii for now + if nii_format=='nii': + os.system('gzip %s' % (file,)) + file=file+'.gz' + + setattr(self, 'out_file', file) + + return runtime + + def _list_outputs(self): + return {'out_file': getattr(self, 'out_file')} + def init_bold_reference_wf(name='gen_bold_ref'): """ diff --git a/rabies/run_main.py b/rabies/run_main.py index 9984d36..ad43697 100644 --- a/rabies/run_main.py +++ b/rabies/run_main.py @@ -5,6 +5,10 @@ from pathlib import Path import pathos.multiprocessing as multiprocessing # Better multiprocessing +import logging +logging.basicConfig(filename='rabies.log', filemode='w', format='%(asctime)s - %(levelname)s - %(message)s', level=os.environ.get("LOGLEVEL", "INFO")) +log = logging.getLogger(__name__) + def get_parser(): """Build parser object""" parser = argparse.ArgumentParser( @@ -17,6 +21,9 @@ def get_parser(): help='the root folder of the input data directory.') parser.add_argument('output_dir', action='store', type=Path, help='the output path to drop outputs from major preprocessing steps.') + parser.add_argument("--bids_input", type=bool, default=False, + help="If the provided input data folder is in the BIDS format to use the BIDS reader." + "Note that all .nii inputs will be converted to compressed .gz format.") parser.add_argument("-e", "--bold_only", type=bool, default=False, help="preprocessing with only EPI scans. commonspace registration and distortion correction" " is executed through registration of the EPIs to a common template atlas.") @@ -26,7 +33,7 @@ def get_parser(): parser.add_argument("-b", "--bias_reg_script", type=str, default='Rigid', help="specify a registration script for iterative bias field correction. 
'default' is a rigid registration.") parser.add_argument("-r", "--coreg_script", type=str, default='SyN', - help="Specify EPI to anat coregistration script. Built-in options include 'Rigid', 'Affine' and 'SyN' (non-linear), but" + help="Specify EPI to anat coregistration script. Built-in options include 'Rigid', 'Affine', 'SyN' (non-linear) and 'light_SyN', but" " can specify a custom registration script following the template script structure (see RABIES/rabies/shell_scripts/ for template).") parser.add_argument("-p", "--plugin", type=str, default='Linear', help="Specify the nipype plugin for workflow execution. Consult nipype plugin documentation for detailed options." @@ -111,10 +118,23 @@ def execute_workflow(): #generates the parser CLI and execute the workflow based on specified parameters. opts = get_parser().parse_args() + ###managing log info + from ._info import __version__ + log.info('Running RABIES - version: '+__version__) + + #print complete CLI command + args='CLI INPUTS: \n' + for arg in vars(opts): + input='-> {arg} = {value} \n'.format( + arg=arg, value=getattr(opts, arg)) + args+=input + log.info(args) + #obtain parser parameters + bids_input=opts.bids_input bold_preproc_only=opts.bold_only bias_reg_script=opts.bias_reg_script - coreg_script=opts.coreg_script + coreg_script=define_reg_script(opts.coreg_script) commonspace_method=opts.commonspace_method data_dir_path=os.path.abspath(str(opts.input_dir)) output_folder=os.path.abspath(str(opts.output_dir)) @@ -142,24 +162,7 @@ def execute_workflow(): os.environ["ants_dbm_memory_request"]=opts.memory_request os.environ["ants_dbm_local_threads"]=str(opts.local_threads) template_reg_option=opts.template_reg_script - import rabies - dir_path = os.path.dirname(os.path.realpath(rabies.__file__)) - if template_reg_option=='SyN': - template_reg_script=dir_path+'/shell_scripts/SyN_registration.sh' - elif template_reg_option=='light_SyN': - template_reg_script=dir_path+'/shell_scripts/light_SyN_registration.sh' - elif template_reg_option=='Affine': - template_reg_script=dir_path+'/shell_scripts/Affine_registration.sh' - elif template_reg_option=='Rigid': - template_reg_script=dir_path+'/shell_scripts/Rigid_registration.sh' - else: - ''' - For user-provided antsRegistration command. 
- ''' - if os.path.isfile(template_reg_option): - template_reg_script=template_reg_option - else: - raise ValueError('REGISTRATION ERROR: THE REG SCRIPT FILE DOES NOT EXISTS') + template_reg_script=define_reg_script(template_reg_option) #template options # set OS paths to template and atlas files @@ -191,10 +194,10 @@ def execute_workflow(): if bold_preproc_only: from rabies.preprocess_bold_pkg.bold_main_wf import init_EPIonly_bold_main_wf - workflow = init_EPIonly_bold_main_wf(data_dir_path, data_csv, output_folder, tr=stc_TR, tpattern=stc_tpattern, apply_STC=stc_bool, bias_reg_script=bias_reg_script, coreg_script=coreg_script, template_reg_script=template_reg_script, isotropic_resampling=isotropic_resampling, upsampling=upsampling, resampling_data_type=resampling_data_type) + workflow = init_EPIonly_bold_main_wf(data_dir_path, data_csv, output_folder, bids_input=bids_input, tr=stc_TR, tpattern=stc_tpattern, apply_STC=stc_bool, bias_reg_script=bias_reg_script, coreg_script=coreg_script, template_reg_script=template_reg_script, isotropic_resampling=isotropic_resampling, upsampling=upsampling, resampling_data_type=resampling_data_type) elif not bold_preproc_only: from rabies.main_wf import init_unified_main_wf - workflow = init_unified_main_wf(data_dir_path, data_csv, output_folder, tr=stc_TR, tpattern=stc_tpattern, commonspace_method=commonspace_method, template_reg_script=template_reg_script, apply_STC=stc_bool, bias_reg_script=bias_reg_script, coreg_script=coreg_script, isotropic_resampling=isotropic_resampling, upsampling=upsampling, resampling_data_type=resampling_data_type) + workflow = init_unified_main_wf(data_dir_path, data_csv, output_folder, bids_input=bids_input, tr=stc_TR, tpattern=stc_tpattern, commonspace_method=commonspace_method, template_reg_script=template_reg_script, apply_STC=stc_bool, bias_reg_script=bias_reg_script, coreg_script=coreg_script, isotropic_resampling=isotropic_resampling, upsampling=upsampling, resampling_data_type=resampling_data_type) else: raise ValueError('bold_preproc_only must be true or false.') @@ -217,6 +220,32 @@ def execute_workflow(): 'log_directory' : os.getcwd()} print('Debug ON') - print('Running main workflow with %s plugin.' % plugin) - #execute workflow, with plugin_args limiting the cluster load for parallel execution - workflow.run(plugin=plugin, plugin_args = {'max_jobs':50,'dont_resubmit_completed_jobs': True, 'qsub_args': '-pe smp %s' % (os.environ["min_proc"])}) + try: + print('Running main workflow with %s plugin.' % plugin) + #execute workflow, with plugin_args limiting the cluster load for parallel execution + workflow.run(plugin=plugin, plugin_args = {'max_jobs':50,'dont_resubmit_completed_jobs': True, 'qsub_args': '-pe smp %s' % (os.environ["min_proc"])}) + except Exception as e: + log.critical('RABIES failed: %s', e) + raise + + +def define_reg_script(reg_option): + import rabies + dir_path = os.path.dirname(os.path.realpath(rabies.__file__)) + if reg_option=='SyN': + reg_script=dir_path+'/shell_scripts/SyN_registration.sh' + elif reg_option=='light_SyN': + reg_script=dir_path+'/shell_scripts/light_SyN_registration.sh' + elif reg_option=='Affine': + reg_script=dir_path+'/shell_scripts/Affine_registration.sh' + elif reg_option=='Rigid': + reg_script=dir_path+'/shell_scripts/Rigid_registration.sh' + else: + ''' + For user-provided antsRegistration command. 
+ ''' + if os.path.isfile(reg_option): + reg_script=reg_option + else: + raise ValueError('REGISTRATION ERROR: THE REG SCRIPT FILE DOES NOT EXISTS') + return reg_script diff --git a/rabies/shell_scripts/Affine_registration.sh b/rabies/shell_scripts/Affine_registration.sh index 28c87e8..8b7cca5 100644 --- a/rabies/shell_scripts/Affine_registration.sh +++ b/rabies/shell_scripts/Affine_registration.sh @@ -11,4 +11,4 @@ antsRegistration -d 3 \ -c 100x50x25 -s 1x0.5x0 -f 3x2x1 --masks [$mask,NULL] \ -t Affine[0.1] -m Mattes[$anat_file,$EPI,1,64,None] \ -c 100x50x25 -s 1x0.5x0 -f 3x2x1 --masks [$mask,$mask] \ ---interpolation BSpline[5] -z 1 -u 0 -a 1 +--interpolation BSpline[5] -z 1 -u 0 diff --git a/rabies/shell_scripts/Rigid_registration.sh b/rabies/shell_scripts/Rigid_registration.sh index 2ddfc43..790b0c8 100644 --- a/rabies/shell_scripts/Rigid_registration.sh +++ b/rabies/shell_scripts/Rigid_registration.sh @@ -8,4 +8,4 @@ antsRegistration -d 3 \ --initial-moving-transform [$anat_file,$EPI,1] \ -t Rigid[0.1] -m Mattes[$anat_file,$EPI,1,64,None] \ -c 1000x500x250x100x50x25 -s 8x4x2x1x0.5x0 -f 6x5x4x3x2x1 --masks [NULL,NULL] \ ---interpolation BSpline[5] -z 1 -u 0 -a 1 +--interpolation BSpline[5] -z 1 -u 0 diff --git a/rabies/shell_scripts/SyN_registration.sh b/rabies/shell_scripts/SyN_registration.sh index 4c34825..dd504e1 100644 --- a/rabies/shell_scripts/SyN_registration.sh +++ b/rabies/shell_scripts/SyN_registration.sh @@ -28,4 +28,4 @@ antsRegistration --dimensionality 3 \ --transform SyN[0.2,2,0] --metric CC[$anat_file,$EPI,1,4] --convergence [2025x2025x2025x2025x2025x2025x2025x2025x2025x2025x2025x2025x2025x200x50x25x25,1e-6,10] \ --shrink-factors 8x8x8x8x8x8x8x8x8x7x6x5x4x3x2x1x1 \ --smoothing-sigmas 7.99225592362x7.49173910041x6.99114831402x6.49046645078x5.98967067114x5.48872979374x4.98760009911x4.48621831264x3.98448927075x3.4822628776x2.97928762436x2.47510701762x1.96879525311x1.45813399545x0.936031382318x0.355182697615x0 \ - --masks [NULL,NULL] -z 1 -a 1 + --masks [NULL,NULL] -z 1 diff --git a/rabies/shell_scripts/iter_bias_cor.sh b/rabies/shell_scripts/iter_bias_cor.sh index a816eff..844456b 100644 --- a/rabies/shell_scripts/iter_bias_cor.sh +++ b/rabies/shell_scripts/iter_bias_cor.sh @@ -4,18 +4,11 @@ mask=$3 filename_template=$4 reg_script=$5 -ResampleImage 3 $EPI resampled.nii.gz 0.4x0.4x0.4 0 4 -ImageMath 3 null_mask.nii.gz ThresholdAtMean resampled.nii.gz 0 -ImageMath 3 thresh_mask.nii.gz ThresholdAtMean resampled.nii.gz 2 -N4BiasFieldCorrection -d 3 -i resampled.nii.gz -b 20 -s 1 -c [100x100x100x100,1e-6] -w thresh_mask.nii.gz -x null_mask.nii.gz -o corrected.nii.gz +ImageMath 3 null_mask.nii.gz ThresholdAtMean $EPI 0 +ImageMath 3 thresh_mask.nii.gz ThresholdAtMean $EPI 2 +N4BiasFieldCorrection -d 3 -i $EPI -b 20 -s 1 -c [100x100x100x100,1e-6] -w thresh_mask.nii.gz -x null_mask.nii.gz -o corrected.nii.gz #iterative registration and bias correction -ResampleImage 3 corrected.nii.gz resampled100.nii.gz 0.1x0.1x0.1 0 4 - -bash $reg_script resampled100.nii.gz $anat_file $mask $filename_template - -antsApplyTransforms -d 3 -i $mask -t ${filename_template}_output_InverseComposite.h5 -r resampled.nii.gz -o ${filename_template}_resampled_mask.nii.gz -n GenericLabel - +bash $reg_script corrected.nii.gz $anat_file $mask $filename_template +antsApplyTransforms -d 3 -i $mask -t [${filename_template}_output_0GenericAffine.mat,1] -r $EPI -o ${filename_template}_resampled_mask.nii.gz -n GenericLabel N4BiasFieldCorrection -d 3 -i resampled.nii.gz -b 20 -s 1 -c [100x100x100x100,1e-6] -w 
${filename_template}_resampled_mask.nii.gz -x null_mask.nii.gz -o iter_corrected.nii.gz
-
-ResampleImage 3 iter_corrected.nii.gz ${filename_template}_bias_cor.nii.gz 0.1x0.1x0.1 0 4
diff --git a/rabies/shell_scripts/light_SyN_registration.sh b/rabies/shell_scripts/light_SyN_registration.sh
index 4377f48..47a2d88 100644
--- a/rabies/shell_scripts/light_SyN_registration.sh
+++ b/rabies/shell_scripts/light_SyN_registration.sh
@@ -24,4 +24,4 @@ antsRegistration --dimensionality 3 \
 --transform SyN[0.2,2,0] --metric CC[$fixed,$moving,1,4] --convergence [2025x2025x200x50x25x25,1e-6,10] \
 --shrink-factors 6x4x3x2x1x1 \
 --smoothing-sigmas 5.48872979374x3.4822628776x2.47510701762x1.45813399545x0.936031382318x0 \
- --masks [NULL,NULL] -z 1 -a 1
+ --masks [NULL,NULL] -z 1
diff --git a/rabies_environment.yml b/rabies_environment.yml
index 076bd5f..7054e94 100644
--- a/rabies_environment.yml
+++ b/rabies_environment.yml
@@ -1,9 +1,8 @@
 name: rabies
 channels:
+  - simpleitk
   - defaults
   - conda-forge
-  - bioconda
-  - simpleitk
 dependencies:
   - blas=2.11=openblas
   - keepalive=0.5=py_1
@@ -128,4 +127,11 @@ dependencies:
   - zstd=1.3.7=h0b5b093_0
   - simpleitk=1.2.2=py36hf484d3e_0
   - pip:
+    - bids-validator==1.2.4
     - dask==2.3.0
+    - docopt==0.6.2
+    - num2words==0.5.10
+    - patsy==0.5.1
+    - pybids==0.9.4
+    - sqlalchemy==1.3.10
+
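With the change to run_antsRegistration above, the node now returns [affine, warp, inverse_warp, warped_image] and substitutes the string 'NULL' for the warp fields when the selected registration script is purely linear. A minimal sketch, assuming ANTs is available on the PATH, of how a downstream step could drop those placeholders before resampling; the helper name apply_reg_outputs and the example file names are hypothetical and not part of the patch:

import os

def apply_reg_outputs(in_file, ref_file, out_file, affine, warp='NULL'):
    """Hypothetical helper: resample in_file into ref_file space using the
    transforms returned by run_antsRegistration, skipping 'NULL' placeholders."""
    # ANTs convention: list the non-linear warp before the affine when
    # composing the moving-to-fixed transform stack.
    transforms = [t for t in (warp, affine) if t != 'NULL']
    transform_args = ' '.join('-t %s' % t for t in transforms)
    cmd = 'antsApplyTransforms -d 3 -i %s -r %s -o %s %s -n BSpline[5]' % (
        in_file, ref_file, out_file, transform_args)
    if os.system(cmd) != 0:
        raise ValueError('antsApplyTransforms failed: %s' % cmd)

# Example (placeholder file names):
# apply_reg_outputs('sub-001_bold_ref.nii.gz', 'anat.nii.gz', 'bold_in_anat.nii.gz',
#                   affine='sub-001_bold_ref_output_0GenericAffine.mat',
#                   warp='sub-001_bold_ref_output_1Warp.nii.gz')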
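The BIDS support added in utils.py (prep_bids_iter, BIDSDataGraber) and exposed through --bids_input can also be exercised on its own to check that a dataset resolves to unique files. A minimal sketch, assuming pybids is installed and that /data/bids is a placeholder path to a BIDS-formatted dataset:

from bids.layout import BIDSLayout
from rabies.preprocess_bold_pkg.utils import prep_bids_iter, BIDSDataGraber

bids_dir = '/data/bids'  # placeholder path to a BIDS dataset

# subjects plus the session/run ranges the workflow will iterate over
layout = BIDSLayout(bids_dir)
subject_list, session_iter, run_iter = prep_bids_iter(layout)
print(subject_list, session_iter, run_iter)

# fetch a single functional scan; the interface raises if the BIDS spec
# matches no file or more than one file
grabber = BIDSDataGraber(bids_dir=bids_dir, datatype='func',
                         subject_id=subject_list[0], session=1, run=1)
print(grabber.run().outputs.out_file)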