diff --git a/Dockerfile b/Dockerfile
index 7b7c9f8..5c62bdc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -76,16 +76,18 @@ RUN apt-get clean \
     fi \
     && chmod -R 777 /neurodocker && chmod a+s /neurodocker
 
-# install wb_command v1.3.2
+#-----------------------------
+# Install Connectome Workbench
+#-----------------------------
 RUN mkdir -p /opt
 WORKDIR /opt
-RUN curl --retry 5 https://www.humanconnectome.org/storage/app/media/workbench/workbench-linux64-v1.3.2.zip --output workbench-linux64-v1.3.2.zip && \
-    unzip workbench-linux64-v1.3.2.zip && \
-    rm workbench-linux64-v1.3.2.zip
+RUN curl --retry 5 https://www.humanconnectome.org/storage/app/media/workbench/workbench-linux64-v1.4.2.zip --output workbench-linux64-v1.4.2.zip && \
+    unzip workbench-linux64-v1.4.2.zip && \
+    rm workbench-linux64-v1.4.2.zip
 
-#-------------------
-# Install ANTs 2.2.0
-#-------------------
+#-------------
+# Install ANTs
+#-------------
 RUN echo "Downloading ANTs ..." \
     && curl -sSL --retry 5 https://dl.dropbox.com/s/2f4sui1z6lcgyek/ANTs-Linux-centos5_x86_64-v2.2.0-0740f91.tar.gz \
     | tar zx -C /opt
@@ -93,9 +95,9 @@ RUN echo "Downloading ANTs ..." \
 ENV ANTSPATH=/opt/ants \
     PATH=/opt/ants:$PATH
 
-#------------------------
-# Install Convert3D 1.0.0
-#------------------------
+#------------------
+# Install Convert3D
+#------------------
 RUN echo "Downloading C3D ..." \
     && mkdir /opt/c3d \
     && curl -sSL --retry 5 https://sourceforge.net/projects/c3d/files/c3d/1.0.0/c3d-1.0.0-Linux-x86_64.tar.gz/download \
@@ -108,9 +110,9 @@ RUN echo "Downloading C3D ..." \
 ENV C3DPATH=/opt/c3d/bin \
     PATH=/opt/c3d/bin:$PATH
 
-#--------------------------
-# Install FreeSurfer v5.3.0-HCP
-#--------------------------
+#-------------------
+# Install FreeSurfer
+#-------------------
 RUN echo "Downloading FreeSurfer ..." \
     && curl -sSL --retry 5 https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/5.3.0-HCP/freesurfer-Linux-centos6_x86_64-stable-pub-v5.3.0-HCP.tar.gz \
     | tar xz -C /opt \
@@ -133,7 +135,7 @@ ENV FREESURFER_HOME=/opt/freesurfer
 RUN chmod 777 /opt/freesurfer
 
 #-----------------------------------------------------------
-# Install FSL v5.0.10
+# Install FSL
 # FSL is non-free. If you are considering commerical use
 # of this Docker image, please consult the relevant license:
 # https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence
@@ -158,16 +160,16 @@ ENV FSLDIR=/opt/fsl \
     FSL_DIR=/opt/fsl \
     PATH=/opt/fsl/bin:$PATH
 
-#---------------------
+#--------------------------------
 # Install MATLAB Compiler Runtime
-#---------------------
+#--------------------------------
 RUN mkdir /opt/matlab /opt/matlab_download
 WORKDIR /opt/matlab_download
-RUN wget http://ssd.mathworks.com/supportfiles/downloads/R2016b/deployment_files/R2016b/installers/glnxa64/MCR_R2016b_glnxa64_installer.zip \
-    && unzip MCR_R2016b_glnxa64_installer.zip \
+RUN wget http://ssd.mathworks.com/supportfiles/downloads/R2017a/deployment_files/R2017a/installers/glnxa64/MCR_R2017a_glnxa64_installer.zip \
+    && unzip MCR_R2017a_glnxa64_installer.zip \
     && ./install -agreeToLicense yes -mode silent -destinationFolder /opt/matlab \
     && rm -rf /opt/matlab_download
-#ENV LD_LIBRARY_PATH=/opt/matlab/v91/bin/glnxa64:/opt/matlab/v91/glnxa64:/opt/matlab/v91/runtime/glnxa64:$LD_LIBRARY_PATH
+#ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/matlab/v91/bin/glnxa64:/opt/matlab/v91/glnxa64:/opt/matlab/v91/runtime/glnxa64
 
 #---------------------
 # Install MSM Binaries
@@ -178,9 +180,9 @@ RUN mv /opt/homes/ecr05/MSM_HOCR_v2/* /opt/msm/
 RUN rm -rf /opt/homes /opt/msm/MacOSX /opt/msm/Centos
 ENV MSMBINDIR=/opt/msm/Ubuntu
 
-#----------------------------
-# Make perl version 5.20.3
-#----------------------------
+#----------
+# Make perl
+#----------
 RUN curl -sSL --retry 5 http://www.cpan.org/src/5.0/perl-5.20.3.tar.gz | tar zx -C /opt
 WORKDIR /opt/perl-5.20.3
 RUN ./Configure -des -Dprefix=/usr/local
@@ -189,17 +191,17 @@ RUN rm -f /usr/bin/perl && ln -s /usr/local/bin/perl /usr/bin/perl
 WORKDIR /
 RUN rm -rf /opt/perl-5.20.3/
 
-#------------------
+#---------------
 # Make libnetcdf
-#------------------
+#---------------
 
-RUN curl -sSL --retry 5 ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.6.1.tar.gz | tar zx -C /opt
-WORKDIR /opt/netcdf-4.6.1/
+RUN curl -sSL --retry 5 https://github.com/Unidata/netcdf-c/archive/v4.6.1.tar.gz | tar zx -C /opt
+WORKDIR /opt/netcdf-c-4.6.1/
 RUN LDFLAGS=-L/usr/local/lib && CPPFLAGS=-I/usr/local/include && ./configure --disable-netcdf-4 --disable-dap --enable-shared --prefix=/usr/local
 RUN make && make install
 WORKDIR /usr/local/lib
 RUN ln -s libnetcdf.so.13.1.1 libnetcdf.so.6
-RUN rm -rf /opt/netcdf-4.6.1/
+RUN rm -rf /opt/netcdf-c-4.6.1/
 ENV LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
 
 #------------------------------------------
@@ -219,7 +221,7 @@ ENV WORKBENCHDIR=/opt/workbench \
 RUN ln -s -f /lib/x86_64-linux-gnu/libz.so.1.2.11 /opt/workbench/libs_linux64/libz.so.1
 
 # Fix libstdc++6 error
-RUN ln -sf /usr/lib/x86_64-linux-gnu/libstdc++.so.6.0.24 /opt/matlab/v91/sys/os/glnxa64/libstdc++.so.6
+RUN ln -sf /usr/lib/x86_64-linux-gnu/libstdc++.so.6.0.24 /opt/matlab/v92/sys/os/glnxa64/libstdc++.so.6
 
 # add dcan dependencies
 RUN mkdir /opt/dcan-tools
@@ -227,13 +229,13 @@ WORKDIR /opt/dcan-tools
 # dcan hcp code
 RUN git clone -b 'v2.0.0' --single-branch --depth 1 https://github.com/DCAN-Labs/DCAN-HCP.git /opt/pipeline
 # dcan bold processing
-RUN git clone -b 'v4.0.0' --single-branch --depth 1 https://github.com/DCAN-Labs/dcan_bold_processing.git dcan_bold_proc
+RUN git clone -b 'hotfix/issue27' --single-branch --depth 1 https://github.com/DCAN-Labs/dcan_bold_processing.git dcan_bold_proc
 # dcan custom clean
 RUN git clone -b 'v0.0.0' --single-branch --depth 1 https://github.com/DCAN-Labs/CustomClean.git customclean
 # abcd task prep
 RUN git clone -b 'v0.0.0' --single-branch --depth 1 https://github.com/DCAN-Labs/abcd_task_prep.git ABCD_tfMRI
 # dcan executive summary
-RUN git clone -b 'v0.0.0' --single-branch --depth 1 https://github.com/DCAN-Labs/ExecutiveSummary.git executivesummary
+RUN git clone -b 'v0.0.1' --single-branch --depth 1 https://github.com/DCAN-Labs/ExecutiveSummary.git executivesummary
 # unzip template file
 RUN gunzip /opt/dcan-tools/executivesummary/summary_tools/templates/parasagittal_Tx_169_template.scene.gz
@@ -255,50 +257,3 @@ COPY ["LICENSE", "/LICENSE"]
 ENTRYPOINT ["/entrypoint.sh"]
 WORKDIR /
 CMD ["--help"]
-
-#--------------------------------------
-# Save container specifications to JSON
-#--------------------------------------
-RUN echo '{ \
-    \n  "pkg_manager": "apt", \
-    \n  "check_urls": true, \
-    \n  "instructions": [ \
-    \n    [ \
-    \n      "base", \
-    \n      "ubuntu:17.10" \
-    \n    ], \
-    \n    [ \
-    \n      "workbench", \
-    \n      { \
-    \n        "version": "1.3.2" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "ants", \
-    \n      { \
-    \n        "version": "2.2.0" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "c3d", \
-    \n      { \
-    \n        "version": "1.0.0" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "freesurfer", \
-    \n      { \
-    \n        "version": "5.3.0-HCP", \
-    \n        "license_path": "license.txt" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "fsl", \
-    \n      { \
-    \n        "version": "5.0.10" \
-    \n      } \
-    \n    ] \
-    \n  ], \
-    \n  "generation_timestamp": "2018-03-15 20:22:57", \
-    \n  "neurodocker_version": "0.3.2-9-g7441d77" \
-    \n}' > /neurodocker/neurodocker_specs.json
diff --git a/README.md b/README.md
index 30e3de6..7f8a3e9 100644
--- a/README.md
+++ b/README.md
@@ -113,8 +113,8 @@ optional arguments:
                         quartile range for participant group respiratory rate
                         (breaths per minute), or to match bids physio data
                         directly [3]. These parameters are highly recommended
-                        for data acquired with a frequency of approx. 1 Hz or
-                        more (TR<=1.0). Default is no filter.
+                        for data acquired with a frequency of greater than 1
+                        Hz (TR less than 1 second). Default is no filter.
 
 Special pipeline options:
   Options which pertain to an alternative pipeline or an extra stage which is not
diff --git a/app/SetupEnv.sh b/app/SetupEnv.sh
index ff24d67..a74a3a0 100755
--- a/app/SetupEnv.sh
+++ b/app/SetupEnv.sh
@@ -38,7 +38,7 @@ export MSMBINDIR=/opt/msm/Ubuntu
 
 # Set up DCAN Environment Variables
-export MCRROOT=/opt/matlab/v91
+export MCRROOT=/opt/matlab/v92
 export DCANBOLDPROCDIR=/opt/dcan-tools/dcan_bold_proc
 export DCANBOLDPROCVER=DCANBOLDProc_v4.0.0
 export EXECSUMDIR=/opt/dcan-tools/executivesummary
diff --git a/app/helpers.py b/app/helpers.py
index d661b2e..beba02f 100644
--- a/app/helpers.py
+++ b/app/helpers.py
@@ -35,7 +35,7 @@ def read_bids_dataset(bids_input, subject_list=None, collect_on_subject=False):
         }
     """
-    layout = BIDSLayout(bids_input)
+    layout = BIDSLayout(bids_input, index_metadata=True)
     subjects = layout.get_subjects()
 
     # filter subject list
@@ -60,16 +60,15 @@
         'Otherwise check that the bids folder provided is correct.'
 
     for subject, sessions in subsess:
-        # get relevant image modalities
-        anat = set_anatomicals(layout, subject, sessions)
-        func = set_functionals(layout, subject, sessions)
-        fmap = set_fieldmaps(layout, subject, sessions)
+        # get relevant image datatypes
+        anat, anat_types = set_anatomicals(layout, subject, sessions)
+        func, func_types = set_functionals(layout, subject, sessions)
+        fmap, fmap_types = set_fieldmaps(layout, subject, sessions)
 
         bids_data = {
             'subject': subject,
             'session': sessions if not collect_on_subject else None,
-            'types': layout.get(subject=subject, session=sessions,
-                                target='type', return_type='id')
+            'types': anat_types.union(func_types, fmap_types)
         }
         bids_data.update(anat)
         bids_data.update(func)
@@ -80,83 +79,120 @@ def read_bids_dataset(bids_input, subject_list=None, collect_on_subject=False):
 
 def set_anatomicals(layout, subject, sessions):
     """
-    returns dictionary of anatomical (T1w, T2w) filepaths and associated
-    metadata.
-    :param subject: participant labels.
+    Returns dictionary of anatomical (T1w, T2w) filepaths and associated
+    metadata, and set of types.
+    :param subject: participant label.
     :param sessions: iterable of session labels.
     """
-    t1ws = layout.get(subject=subject, session=sessions, modality='anat',
-                      type='T1w', extensions='.nii.gz')
-    t1w_metadata = layout.get_metadata(t1ws[0].filename)
+    types = set()
+    t1ws = layout.get(subject=subject, session=sessions, datatype='anat',
+                      suffix='T1w', extension=['nii.gz','nii'])
+    if len(t1ws):
+        t1w_metadata = layout.get_metadata(t1ws[0].path)
+        types.add('T1w')
+    else:
+        print("No T1w data was found for this subject.")
+        t1w_metadata = None
 
-    t2ws = layout.get(subject=subject, session=sessions, modality='anat',
-                      type='T2w', extensions='.nii.gz')
+    t2ws = layout.get(subject=subject, session=sessions, datatype='anat',
+                      suffix='T2w', extension=['nii.gz','nii'])
     if len(t2ws):
-        t2w_metadata = layout.get_metadata(t2ws[0].filename)
+        t2w_metadata = layout.get_metadata(t2ws[0].path)
+        types.add('T2w')
     else:
         t2w_metadata = None
+
     spec = {
-        't1w': [t.filename for t in t1ws],
+        't1w': [t.path for t in t1ws],
         't1w_metadata': t1w_metadata,
-        't2w': [t.filename for t in t2ws],
+        't2w': [t.path for t in t2ws],
         't2w_metadata': t2w_metadata
     }
-    return spec
+    return spec, types
 
 
 def set_functionals(layout, subject, sessions):
     """
-    returns dictionary of bold filepaths and associated metadata.
-    :param subject: participant labels.
+    Returns dictionary of functional (bold) filepaths and associated metadata,
+    and set of types.
+    :param subject: participant label.
     :param sessions: iterable of session labels.
     """
-    func = layout.get(subject=subject, session=sessions, modality='func',
-                      type='bold', extensions='.nii.gz')
-    func_metadata = [layout.get_metadata(x.filename) for x in func]
+    func = layout.get(subject=subject, session=sessions, datatype='func',
+                      suffix='bold', extension=['nii.gz','nii'])
+    func_metadata = [layout.get_metadata(x.path) for x in func]
+
+    types = {f.entities['suffix'] for f in func}
     spec = {
-        'func': [f.filename for f in func],
+        'func': [f.path for f in func],
         'func_metadata': func_metadata
     }
-    return spec
+    return spec, types
 
 
 def set_fieldmaps(layout, subject, sessions):
     """
-    returns dictionary of fieldmap (epi or magnitude) filepaths and
-    associated metadata.
-    :param subject: participant labels.
+    Returns dictionary of fieldmap (epi or magnitude) filepaths and associated
+    metadata. Only fieldmaps with 'IntendedFor' metadata are returned. Also
+    returns set of types.
+    :param subject: participant label.
     :param sessions: iterable of session labels.
     """
-    fmap = layout.get(subject=subject, session=sessions, modality='fmap',
-                      extensions='.nii.gz')
-    fmap_metadata = [layout.get_metadata(x.filename) for x in fmap]
+    fmap = []
+    fmap_metadata = []
+
+    # Currently, we only support distortion correction methods that use epi,
+    # magnitude, or phasediff field maps. (See fmap_types in ParameterSettings
+    # in pipelines.py.)
+    supported_fmaps = ['epi', 'magnitude', 'magnitude1', 'magnitude2',
+                       'phasediff', 'phase1', 'phase2']
+    extensions = ['nii.gz','nii']
+    for bids_file in layout.get(subject=subject, session=sessions,
+            datatype='fmap', suffix=supported_fmaps, extension=extensions):
+
+        # Only include fmaps with non-empty 'IntendedFor' metadata.
+        meta = bids_file.get_metadata()
+        if 'IntendedFor' in meta.keys() and len(meta['IntendedFor']):
+            fmap.append(bids_file)
+            fmap_metadata.append(meta)
+
+    types = {x.entities['suffix'] for x in fmap}
 
     # handle case spin echo
-    types = [x.type for x in fmap]
-    indices = [i for i, x in enumerate(types) if x == 'epi']
-    if len(indices):
-        # @TODO read IntendedFor field to map field maps to functionals.
-        positive = [i for i, x in enumerate(fmap_metadata) if '-' not in x[
-            'PhaseEncodingDirection']]
-        negative = [i for i, x in enumerate(fmap_metadata) if '-' in x[
-            'PhaseEncodingDirection']]
-        fmap = {'positive': [fmap[i].filename for i in positive],
-                'negative': [fmap[i].filename for i in negative]}
-        fmap_metadata = {
-            'positive': [fmap_metadata[i] for i in positive],
-            'negative': [fmap_metadata[i] for i in negative]}
-        # @TODO check that no orthogonal field maps were collected.
-
-    # handle case fieldmap # @TODO
-    elif 'magnitude' in fmap:
+    if 'epi' in types:
+
+        if len(types) > 1:
+            print("""
+            The pipeline must choose distortion correction method based on the
+            type(s) of field maps available. Therefore, there cannot be more
+            than one type of field map. Please choose either spin echo (epi) or
+            magnitude/phasediff field maps, and make sure those json files have
+            'IntendedFor' values.
+            """)
+            raise Exception('Too many field map types found: %s' % types)
+        else:
+            # We have spin echo - and nothing else - so sort out its data.
+            positive = [i for i, x in enumerate(fmap_metadata) if '-' not in x[
+                'PhaseEncodingDirection']]
+            negative = [i for i, x in enumerate(fmap_metadata) if '-' in x[
+                'PhaseEncodingDirection']]
+            fmap = {'positive': [fmap[i].path for i in positive],
+                    'negative': [fmap[i].path for i in negative]}
+            fmap_metadata = {
+                'positive': [fmap_metadata[i] for i in positive],
+                'negative': [fmap_metadata[i] for i in negative]}
+
+    else:
+        # The other field-map types found above will be filtered out in the
+        # implementation - see pipelines.py.
         pass
 
     spec = {
         'fmap': fmap,
         'fmap_metadata': fmap_metadata
     }
-    return spec
+    return spec, types
 
 
 def get_readoutdir(metadata):
@@ -188,7 +224,7 @@
 
 def get_realdwelltime(metadata):
     """
-    attempts to compute real dwell time from metadata fields.  Certain
+    attempts to compute real dwell time from metadata fields. Certain
     reconstruction parameters such as phaseOversampling and phaseResolution
     may not be accounted for.
""" diff --git a/app/requirements.txt b/app/requirements.txt index 2130e51..7e2f002 100644 --- a/app/requirements.txt +++ b/app/requirements.txt @@ -1,2 +1,2 @@ -pybids<=0.6.5 +pybids==0.9.2 duecredit diff --git a/app/run.py b/app/run.py index f5cb584..580df22 100755 --- a/app/run.py +++ b/app/run.py @@ -161,8 +161,8 @@ def generate_parser(parser=None): 'range for participant group respiratory rate (breaths per ' 'minute), or to match bids physio data directly [3]. These ' 'parameters are highly recommended for data acquired with a ' - 'frequency of approx. 1 Hz or more (TR<=1.0). Default is no ' - 'filter.' + 'frequency of greater than 1 Hz (TR less than 1 second). ' + 'Default is no filter.' ) extras = parser.add_argument_group( 'Special pipeline options',