From 26eb6e1fab27c4b68ec7f482d054d8b8fd4eb597 Mon Sep 17 00:00:00 2001 From: Eric Feczko Date: Thu, 28 Jul 2022 16:59:10 -0500 Subject: [PATCH 01/14] adding goodvoxels freesurfer branch --- fmriprep/workflows/bold/resampling.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index 563655bf8..6d12d4f27 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -145,6 +145,13 @@ def select_target(subject_id, space): mem_gb=mem_gb * 3, ) sampler.inputs.hemi = ["lh", "rh"] + sample_medial_wall_to_volume = pe.MapNode( + fs.Surface2VolTransform(), + iterfield=["hemi","source_file"], + name="sample_medial_wall_to_volume", + mem_gb=mem_gb * 3, + ) + sample_medial_wall_to_volume.inputs.hemi = ["lh", "rh"] update_metadata = pe.MapNode( GiftiSetAnatomicalStructure(), iterfield=["in_file"], @@ -193,10 +200,26 @@ def select_target(subject_id, space): mem_gb=DEFAULT_MEMORY_MIN_GB, ) + binarize_volume = pe.Node( + fs.Binarize( + min=-10000, + ), + + ) + mask_volume = pe.Node( + fs.ApplyMask() + ) # fmt:off workflow.connect([ (inputnode, medial_nans, [("subjects_dir", "subjects_dir")]), (sampler, medial_nans, [("out_file", "in_file")]), + (medial_nans,sample_medial_wall_to_volume, [("out_file"),("source_file")]), + (inputnode,sample_medial_wall_to_volume, [("source_file"),("template_file")]), + (inputnode,sample_medial_wall_to_volume, [("subjects_dir"),("subjects_dir")]), + (inputnode,sample_medial_wall_to_volume, [("subject_id"),("subject_id")]), + (sample_medial_wall_to_volume,binarize_volume, [("out_file"),("in_file")]), + (binarize_volume,mask_volume, [("out_file"),("mask_file")]), + (inputnode,mask_volume, [("source_file"),("in_file")]), (medial_nans, update_metadata, [("out_file", "in_file")]), ]) # fmt:on From 0e52bf3d30ef316b1db0cdcc680edc76c6f8c09a Mon Sep 17 00:00:00 2001 From: ericfeczko <18431357+ericfeczko@users.noreply.github.com> Date: Wed, 17 Aug 2022 12:46:16 -0500 Subject: [PATCH 02/14] added workflow connectors for next stages -- one more needs to be added --- fmriprep/workflows/bold/resampling.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index 6d12d4f27..ca9962a09 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -220,6 +220,9 @@ def select_target(subject_id, space): (sample_medial_wall_to_volume,binarize_volume, [("out_file"),("in_file")]), (binarize_volume,mask_volume, [("out_file"),("mask_file")]), (inputnode,mask_volume, [("source_file"),("in_file")]), + (mask_volume,ConvertFStoNIFTI [("out_file"),("in_file")]), + (ConvertFStoNIFTI,stdev_timeseries, [("out_file"),("in_file")]), + (stdev_timeseries,filter_for_outliers, [("out_file"),("in_file")]), (medial_nans, update_metadata, [("out_file", "in_file")]), ]) # fmt:on From e06c4129cc7feb918a2bcc2a55e9781017f4b4dc Mon Sep 17 00:00:00 2001 From: Anders Perrone Date: Wed, 17 Aug 2022 15:00:07 -0700 Subject: [PATCH 03/14] Create init workflow for masking medial wall outliers --- fmriprep/workflows/bold/resampling.py | 110 +++++++++++++++++++++++++- 1 file changed, 107 insertions(+), 3 deletions(-) diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index ca9962a09..3ee3b8091 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -31,8 +31,9 @@ """ from ...config import 
DEFAULT_MEMORY_MIN_GB +from nipype import Function from nipype.pipeline import engine as pe -from nipype.interfaces import utility as niu, freesurfer as fs +from nipype.interfaces import utility as niu, freesurfer as fs, fsl import nipype.interfaces.workbench as wb @@ -209,6 +210,87 @@ def select_target(subject_id, space): mask_volume = pe.Node( fs.ApplyMask() ) + ConvertFStoNIFTI = pe.Node( + fs.MRIConvert(ext='.nii') + ) + stdev_volume = pe.Node( + fsl.maths.StdImage(dimension='T') + ) + mean_volume = pe.Node( + fsl.maths.MeanImage(dimension='T') + ) + merge_cov_inputs = pe.Node( + niu.Merge(1) + ) + cov_volume = pe.Node( + fsl.maths.MultiImageMaths(), + op_string = '-div %s' + ) + cov_mean = pe.Node( + fsl.ImageStats(), + op_string = '-M' + ) + cov_std = pe.Node( + fsl.ImageStats(), + op_string = '-S' + ) + merge_cov_stats = pe.Node( + niu.Merge(2) + ) + + def _calc_upper_thr(in_stats): + return in_stats[0] + (in_stats[1] * 4) + + upper_thr_val = pe.Node( + Function( + input_names=["in_stats"] + output_names=["upper_thresh"] + function=_calc_upper_thr + ) + ) + + def _calc_lower_thr(in_stats): + return in_stats[0] - (in_stats[1] * 4) + + lower_thr_val = pe.Node( + Function( + input_names=["in_stats"] + output_names=["lower_thresh"] + function=_calc_lower_thr + ) + ) + + upper_thr_volume = pe.Node( + fsl.maths.Threshold() + ) + + lower_thr_volume = pe.Node( + fsl.maths.Threshold() + ) + + upper_binarize_volume_fsl = pe.Node( + fsl.maths.UnaryMaths(operation="bin") + ) + + lower_binarize_volume_fsl = pe.Node( + fsl.maths.UnaryMaths(operation="bin") + ) + + merge_thr_volume = pe.Node( + niu.Merge(1) + ) + + filter_for_outliers = pe.Node( + fsl.maths.MultiImageMaths(), + op_string = '-sub %s -mul -1' + ) + ConvertNIFTItoFS = pe.Node( + fs.MRIConvert(ext='.mgz') + ) + mask_volume_2 = pe.Node( + fs.ApplyMask() + ) + # fmt:off workflow.connect([ (inputnode, medial_nans, [("subjects_dir", "subjects_dir")]), @@ -221,8 +303,30 @@ def select_target(subject_id, space): (binarize_volume,mask_volume, [("out_file"),("mask_file")]), (inputnode,mask_volume, [("source_file"),("in_file")]), (mask_volume,ConvertFStoNIFTI [("out_file"),("in_file")]), - (ConvertFStoNIFTI,stdev_timeseries, [("out_file"),("in_file")]), - (stdev_timeseries,filter_for_outliers, [("out_file"),("in_file")]), + (ConvertFStoNIFTI,stdev_volume [("out_file"),("in_file")]) + (ConvertFStoNIFTI,mean_volume [("out_file"),("in_file")]) + (stdev_volume, merge_cov_inputs [("out_file", "in1")]) + (merge_cov_inputs, cov_volume [("out", "operand_files")]) + (mean_volume, cov_volume [("out_file", "in_file")]) + (cov_volume, cov_mean [("out_file", "in_file")]) + (cov_volume, cov_std [("out_file", "in_file")]) + (cov_mean, merge_cov_stats [("out_stat", "in1")]) + (cov_std, merge_cov_stats [("out_stat", "in2")]) + (merge_cov_stats, upper_thr_val [("out", "in_stats")]) + (merge_cov_stats, lower_thr_val [("out", "in_stats")]) + (upper_thr_val, upper_thr_volume [("upper_thresh", "thresh")]) + (cov_volume, upper_thr_volume [("out_file", "in_file")]) + (lower_thr_val, lower_thr_volume [("lower_thresh", "thresh")]) + (cov_volume, lower_thr_volume [("out_file", "in_file")]) + (lower_thr_volume, lower_binarize_volume_fsl [("out_file", "in_file")]) + (upper_thr_volume, upper_binarize_volume_fsl [("out_file", "in_file")]) + (lower_binarize_volume_fsl, merge_thr_volume [("out_file", "in1")]) + (merge_thr_volume, filter_for_outliers [("out_file", "operand_files")]) + (upper_binarize_volume_fsl, filter_for_outliers [("out_file", "in_file")]) + 
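        # The two Function nodes wired in above (_calc_upper_thr / _calc_lower_thr)
        # receive [mean, std] of the coefficient-of-variation image from
        # merge_cov_stats (fslstats -M / -S, i.e. mean and standard deviation over
        # nonzero voxels) and return simple outlier bounds of mean +/- 4 * std.
        # A rough numpy sketch of the same arithmetic -- illustrative only, the
        # array name `cov` is hypothetical and not part of this patch:
        #
        #     import numpy as np
        #     mean, std = cov.mean(), cov.std()
        #     upper, lower = mean + 4 * std, mean - 4 * std
        #     outlier_mask = (cov > upper) | (cov < lower)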
(filter_for_outliers, ConvertNIFTItoFS [("out_file", "in_file")]) + (ConvertNIFTItoFS, mask_volume_2 [("out_file", "mask_file")]) + (mask_volume, mask_volume_2 [("out_file", "in_file")]) + #TODO link outlier mask to medial nans (medial_nans, update_metadata, [("out_file", "in_file")]), ]) # fmt:on From 64e61e70f45cc529553021049176083e8e08c522 Mon Sep 17 00:00:00 2001 From: madisoth <56737848+madisoth@users.noreply.github.com> Date: Thu, 18 Aug 2022 14:32:53 -0700 Subject: [PATCH 04/14] output "goodvoxels"-masked BOLD surfs in bold_surf_wf (+1 squashed commits) Squashed commits: [b1597c2b] output goodvoxels-masked BOLD surfs in bold_surf_wf --- fmriprep/workflows/bold/resampling.py | 120 ++++++++++++++------------ 1 file changed, 66 insertions(+), 54 deletions(-) diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index 3ee3b8091..8ea604918 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -148,7 +148,7 @@ def select_target(subject_id, space): sampler.inputs.hemi = ["lh", "rh"] sample_medial_wall_to_volume = pe.MapNode( fs.Surface2VolTransform(), - iterfield=["hemi","source_file"], + iterfield=["hemi", "source_file"], name="sample_medial_wall_to_volume", mem_gb=mem_gb * 3, ) @@ -202,10 +202,7 @@ def select_target(subject_id, space): ) binarize_volume = pe.Node( - fs.Binarize( - min=-10000, - ), - + fs.Binarize(min=-10000) ) mask_volume = pe.Node( fs.ApplyMask() @@ -223,16 +220,13 @@ def select_target(subject_id, space): niu.Merge(1) ) cov_volume = pe.Node( - fsl.maths.MultiImageMaths(), - op_string = '-div %s' + fsl.maths.MultiImageMaths(op_string='-div %s'), ) cov_mean = pe.Node( - fsl.ImageStats(), - op_string = '-M' + fsl.ImageStats(op_string='-M'), ) cov_std = pe.Node( - fsl.ImageStats(), - op_string = '-S' + fsl.ImageStats(op_string='-S'), ) merge_cov_stats = pe.Node( niu.Merge(2) @@ -243,8 +237,8 @@ def _calc_upper_thr(in_stats): upper_thr_val = pe.Node( Function( - input_names=["in_stats"] - output_names=["upper_thresh"] + input_names=["in_stats"], + output_names=["upper_thresh"], function=_calc_upper_thr ) ) @@ -254,25 +248,25 @@ def _calc_lower_thr(in_stats): lower_thr_val = pe.Node( Function( - input_names=["in_stats"] - output_names=["lower_thresh"] + input_names=["in_stats"], + output_names=["lower_thresh"], function=_calc_lower_thr ) ) upper_thr_volume = pe.Node( - fsl.maths.Threshold() + fsl.maths.Threshold(direction='below') ) lower_thr_volume = pe.Node( - fsl.maths.Threshold() + fsl.maths.Threshold(direction='above') ) - upper_binarize_volume_fsl = pe.Node( + binarize_upper_volume = pe.Node( fsl.maths.UnaryMaths(operation="bin") ) - lower_binarize_volume_fsl = pe.Node( + binarize_lower_volume = pe.Node( fsl.maths.UnaryMaths(operation="bin") ) @@ -282,52 +276,70 @@ def _calc_lower_thr(in_stats): filter_for_outliers = pe.Node( fsl.maths.MultiImageMaths(), - op_string = '-sub %s -mul -1' + op_string='-sub %s -mul -1' ) + ConvertNIFTItoFS = pe.Node( fs.MRIConvert(ext='.mgz') ) - mask_volume_2 = pe.Node( + + apply_goodvoxels_mask = pe.Node( fs.ApplyMask() ) + goodvoxels_sampler = pe.Node( + fs.SampleToSurface( + cortex_mask=True, + interp_method="trilinear", + out_type="gii", + override_reg_subj=True, + sampling_method="average", + sampling_range=(0, 1, 0.2), + sampling_units="frac", + ), + iterfield=["hemi"], + name="goodvoxels_sampler", + mem_gb=mem_gb * 3, + ) + sampler.inputs.hemi = ["lh", "rh"] + # fmt:off workflow.connect([ (inputnode, medial_nans, [("subjects_dir", "subjects_dir")]), 
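        # A reminder of the connection syntax used throughout this list: each entry
        # is (source_node, dest_node, [(source_port, dest_port), ...]), with the two
        # port names paired inside a single tuple.  Minimal self-contained sketch
        # with hypothetical nodes (not part of this patch):
        #
        #     from nipype.interfaces import utility as niu
        #     from nipype.pipeline import engine as pe
        #
        #     a = pe.Node(niu.IdentityInterface(fields=["x"]), name="a")
        #     b = pe.Node(niu.IdentityInterface(fields=["y"]), name="b")
        #     demo = pe.Workflow(name="demo")
        #     demo.connect([(a, b, [("x", "y")])])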
(sampler, medial_nans, [("out_file", "in_file")]), - (medial_nans,sample_medial_wall_to_volume, [("out_file"),("source_file")]), - (inputnode,sample_medial_wall_to_volume, [("source_file"),("template_file")]), - (inputnode,sample_medial_wall_to_volume, [("subjects_dir"),("subjects_dir")]), - (inputnode,sample_medial_wall_to_volume, [("subject_id"),("subject_id")]), - (sample_medial_wall_to_volume,binarize_volume, [("out_file"),("in_file")]), - (binarize_volume,mask_volume, [("out_file"),("mask_file")]), - (inputnode,mask_volume, [("source_file"),("in_file")]), - (mask_volume,ConvertFStoNIFTI [("out_file"),("in_file")]), - (ConvertFStoNIFTI,stdev_volume [("out_file"),("in_file")]) - (ConvertFStoNIFTI,mean_volume [("out_file"),("in_file")]) - (stdev_volume, merge_cov_inputs [("out_file", "in1")]) - (merge_cov_inputs, cov_volume [("out", "operand_files")]) - (mean_volume, cov_volume [("out_file", "in_file")]) - (cov_volume, cov_mean [("out_file", "in_file")]) - (cov_volume, cov_std [("out_file", "in_file")]) - (cov_mean, merge_cov_stats [("out_stat", "in1")]) - (cov_std, merge_cov_stats [("out_stat", "in2")]) - (merge_cov_stats, upper_thr_val [("out", "in_stats")]) - (merge_cov_stats, lower_thr_val [("out", "in_stats")]) - (upper_thr_val, upper_thr_volume [("upper_thresh", "thresh")]) - (cov_volume, upper_thr_volume [("out_file", "in_file")]) - (lower_thr_val, lower_thr_volume [("lower_thresh", "thresh")]) - (cov_volume, lower_thr_volume [("out_file", "in_file")]) - (lower_thr_volume, lower_binarize_volume_fsl [("out_file", "in_file")]) - (upper_thr_volume, upper_binarize_volume_fsl [("out_file", "in_file")]) - (lower_binarize_volume_fsl, merge_thr_volume [("out_file", "in1")]) - (merge_thr_volume, filter_for_outliers [("out_file", "operand_files")]) - (upper_binarize_volume_fsl, filter_for_outliers [("out_file", "in_file")]) - (filter_for_outliers, ConvertNIFTItoFS [("out_file", "in_file")]) - (ConvertNIFTItoFS, mask_volume_2 [("out_file", "mask_file")]) - (mask_volume, mask_volume_2 [("out_file", "in_file")]) - #TODO link outlier mask to medial nans - (medial_nans, update_metadata, [("out_file", "in_file")]), + (medial_nans, sample_medial_wall_to_volume, [("out_file", "source_file")]), + (inputnode, sample_medial_wall_to_volume, [("source_file", "template_file")]), + (inputnode, sample_medial_wall_to_volume, [("subjects_dir", "subjects_dir")]), + (inputnode, sample_medial_wall_to_volume, [("subject_id", "subject_id")]), + (sample_medial_wall_to_volume, binarize_volume, [("out_file", "in_file")]), + (binarize_volume, mask_volume, [("out_file", "mask_file")]), + (inputnode, mask_volume, [("source_file", "in_file")]), + (mask_volume, ConvertFStoNIFTI, [("out_file", "in_file")]), + (ConvertFStoNIFTI, stdev_volume, [("out_file", "in_file")]), + (ConvertFStoNIFTI, mean_volume, [("out_file", "in_file")]), + (stdev_volume, merge_cov_inputs, [("out_file", "in1")]), + (merge_cov_inputs, cov_volume, [("out", "operand_files")]), + (mean_volume, cov_volume, [("out_file", "in_file")]), + (cov_volume, cov_mean, [("out_file", "in_file")]), + (cov_volume, cov_std, [("out_file", "in_file")]), + (cov_mean, merge_cov_stats, [("out_stat", "in1")]), + (cov_std, merge_cov_stats, [("out_stat", "in2")]), + (merge_cov_stats, upper_thr_val, [("out", "in_stats")]), + (merge_cov_stats, lower_thr_val, [("out", "in_stats")]), + (cov_volume, upper_thr_volume, [("out_file", "in_file")]), + (upper_thr_val, upper_thr_volume, [("upper_thresh", "thresh")]), + (cov_volume, lower_thr_volume, [("out_file", "in_file")]), + 
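        # fsl.maths.Threshold zeroes out the side of the image named by `direction`:
        # direction="below" maps to `fslmaths -thr` (keep values above `thresh` --
        # here, the high-CoV voxels), while direction="above" maps to
        # `fslmaths -uthr` (keep values below `thresh`).  Stand-alone use of the
        # same interface, with a hypothetical file name and threshold:
        #
        #     from nipype.interfaces import fsl
        #
        #     thr = fsl.maths.Threshold(
        #         in_file="cov.nii.gz", thresh=1.2, direction="below"
        #     )
        #     thr.run()  # writes the thresholded image to the working directory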
(lower_thr_val, lower_thr_volume, [("lower_thresh", "thresh")]), + (upper_thr_volume, binarize_upper_volume, [("out_file", "in_file")]), + (lower_thr_volume, binarize_lower_volume, [("out_file", "in_file")]), + (binarize_upper_volume, merge_thr_volume, [("out_file", "in1")]), + (merge_thr_volume, filter_for_outliers, [("out_file", "operand_files")]), + (binarize_lower_volume, filter_for_outliers, [("out_file", "in_file")]), + (filter_for_outliers, ConvertNIFTItoFS, [("out_file", "in_file")]), + (ConvertNIFTItoFS, apply_goodvoxels_mask, [("out_file", "mask_file")]), + (mask_volume, apply_goodvoxels_mask, [("out_file", "in_file")]), + (apply_goodvoxels_mask, medial_nans, [("out_file", "in_file")]), + (goodvoxels_sampler, update_metadata, [("out_file", "in_file")]) ]) # fmt:on return workflow From 260c7a66fa73925599d17c38ba7146628565efb1 Mon Sep 17 00:00:00 2001 From: madisoth <56737848+madisoth@users.noreply.github.com> Date: Sun, 21 Aug 2022 20:16:19 -0700 Subject: [PATCH 05/14] goodvoxels_gifti work --- fmriprep/workflows/base.py | 1 + fmriprep/workflows/bold/base.py | 3 + fmriprep/workflows/bold/resampling.py | 391 ++++++++++++++++++++------ 3 files changed, 315 insertions(+), 80 deletions(-) diff --git a/fmriprep/workflows/base.py b/fmriprep/workflows/base.py index ef43476ab..5bfc9db8f 100644 --- a/fmriprep/workflows/base.py +++ b/fmriprep/workflows/base.py @@ -390,6 +390,7 @@ def init_single_subject_wf(subject_id): # Undefined if --fs-no-reconall, but this is safe ('outputnode.subjects_dir', 'inputnode.subjects_dir'), ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.surfaces', 'inputnode.anat_giftis'), ('outputnode.t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm'), ('outputnode.fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm')]), ]) diff --git a/fmriprep/workflows/bold/base.py b/fmriprep/workflows/bold/base.py index 5f8b8ca76..9b2055ae6 100644 --- a/fmriprep/workflows/bold/base.py +++ b/fmriprep/workflows/bold/base.py @@ -351,6 +351,7 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False): "anat2std_xfm", "std2anat_xfm", "template", + "anat_giftis" "t1w2fsnative_xfm", "fsnative2t1w_xfm", "fmap", @@ -954,6 +955,8 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False): ("subjects_dir", "inputnode.subjects_dir"), ("subject_id", "inputnode.subject_id"), ("t1w2fsnative_xfm", "inputnode.t1w2fsnative_xfm"), + ("anat_giftis", "inputnode.anat_giftis"), + ("t1w_mask", "inputnode.t1w_mask") ]), (bold_t1_trans_wf, bold_surf_wf, [("outputnode.bold_t1", "inputnode.source_file")]), (bold_surf_wf, outputnode, [("outputnode.surfaces", "surfaces")]), diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index 8ea604918..6dbad6c5b 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -29,6 +29,7 @@ .. 
autofunction:: init_bold_preproc_trans_wf """ +from ctypes import create_string_buffer from ...config import DEFAULT_MEMORY_MIN_GB from nipype import Function @@ -85,9 +86,11 @@ def init_bold_surf_wf(mem_gb, surface_spaces, medial_surface_nan, name="bold_sur """ from nipype.interfaces.io import FreeSurferSource + from nipype.interfaces.workbench import CreateSignedDistanceVolume from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.surf import GiftiSetAnatomicalStructure + workflow = Workflow(name=name) workflow.__desc__ = """\ The BOLD time-series were resampled onto the following surfaces @@ -99,7 +102,12 @@ def init_bold_surf_wf(mem_gb, surface_spaces, medial_surface_nan, name="bold_sur inputnode = pe.Node( niu.IdentityInterface( - fields=["source_file", "subject_id", "subjects_dir", "t1w2fsnative_xfm"] + fields=["source_file", + "subject_id", + "subjects_dir", + "t1w2fsnative_xfm", + "anat_giftis", + "t1w_mask"] ), name="inputnode", ) @@ -166,8 +174,120 @@ def select_target(subject_id, space): name="outputnode", ) + # [0, 1, 2, 3] = lh wm, rh wm, lh pial, rh pial + select_wm = pe.Node( + niu.Select(), + index=[0, 1], + name="select_wm_pial", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + + select_pial = pe.Node( + niu.Select(), + index=[2, 3], + name="select_pial", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + + create_wm_distvol = pe.MapNode( + CreateSignedDistanceVolume(), + iterfield=["surface"], + name="create_wm_distvol", + mem_gb=mem_gb, + ) + + create_pial_distvol = pe.MapNode( + CreateSignedDistanceVolume(), + iterfield=["surface"], + name="create_pial_distvol", + mem_gb=mem_gb, + ) + + thresh_wm_distvol = pe.MapNode( + fsl.maths.MathsCommand(args="-thr 0 -bin -mul 255"), + iterfield=["in_file"], + name="thresh_wm_distvol", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + + uthresh_pial_distvol = pe.MapNode( + fsl.maths.MathsCommand(args="-uthr 0 -abs -bin -mul 255"), + iterfield=["in_file"], + name="thresh_pial_distvol", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + + bin_wm_distvol = pe.MapNode( + fsl.maths.UnaryMaths(operation="bin"), + iterfield=["in_file"], + name="bin_threshed_distvol", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + + bin_pial_distvol = pe.MapNode( + fsl.maths.UnaryMaths(operation="bin"), + iterfield=["in_file"], + name="bin_pial_distvol", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + + split_wm_distvol = pe.Node( + niu.Split(splits=[1, 1]), + name="split_wm_distvol", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + merge_wm_distvol_no_flatten = pe.Node( + niu.Merge(2), + no_flatten=True, + name="merge_wm_distvol_no_flatten", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + + make_ribbon_vol = pe.MapNode( + fsl.maths.MultiImageMaths(op_string="-mas %s -mul 255"), + iterfield=["in_file", "operand_files"], + name="make_ribbon_vol", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + bin_ribbon_vol = pe.MapNode( + fsl.maths.UnaryMaths(operation="bin"), + iterfield=["in_file"], + name="bin_ribbon_vol", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + split_squeeze_ribbon_vol = pe.Node( + niu.Split(splits=[1, 1], squeeze=True), + name="split_squeeze_ribbon", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + combine_ribbon_vol_hemis = pe.Node( + fsl.maths.BinaryMaths(operation="add"), + name="combine_ribbon_vol_hemis", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) # fmt:off workflow.connect([ + (inputnode, select_wm, [("anat_giftis", "inlist")]), + (inputnode, select_pial, [("anat_giftis", "inlist")]), + (select_wm, create_wm_distvol, [("out", "surface")]), + (inputnode, create_wm_distvol, [("t1w_mask", "ref_space")]), + (select_pial, 
create_pial_distvol, [("out", "surface")]), + (inputnode, create_pial_distvol, [("t1w_mask", "ref_space")]), + (create_wm_distvol, thresh_wm_distvol, [("out_vol", "in_file")]), + (create_pial_distvol, uthresh_pial_distvol, [("out_vol", "in_file")]), + (thresh_wm_distvol, bin_wm_distvol, [("t1w_mask", "ref_space")]), + (uthresh_pial_distvol, bin_pial_distvol, [("out_vol", "in_file")]), + (bin_wm_distvol, split_wm_distvol, [("out_vol", "inlist")]), + (split_wm_distvol, merge_wm_distvol_no_flatten, [("out1", "in1")]), + (split_wm_distvol, merge_wm_distvol_no_flatten, [("out2", "in2")]), + (bin_pial_distvol, make_ribbon_vol, [("out_file", "in_file")]), + (merge_wm_distvol_no_flatten, make_ribbon_vol, [("out", "operand_files")]), + (make_ribbon_vol, bin_ribbon_vol, [("out_file", "in_file")]), + (bin_ribbon_vol, split_squeeze_ribbon_vol, "out_file", "inlist"), + (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out1", "in_file")]) + (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out2", "operand_file")]), + ]) + workflow.connect([ (inputnode, get_fsnative, [("subject_id", "subject_id"), ("subjects_dir", "subjects_dir")]), (inputnode, targets, [("subject_id", "subject_id")]), @@ -181,9 +301,9 @@ def select_target(subject_id, space): (itersource, rename_src, [("target", "subject")]), (itk2lta, sampler, [("out", "reg_file")]), (targets, sampler, [("out", "target_subject")]), - (rename_src, sampler, [("out_file", "source_file")]), - (update_metadata, outputnode, [("out_file", "surfaces")]), - (itersource, outputnode, [("target", "target")]), + # (rename_src, sampler, [("out_file", "source_file")]), + # (update_metadata, outputnode, [("out_file", "surfaces")]), + # (itersource, outputnode, [("target", "target")]), ]) # fmt:on @@ -194,100 +314,183 @@ def select_target(subject_id, space): from niworkflows.interfaces.freesurfer import MedialNaNs # Refine if medial vertices should be NaNs - medial_nans = pe.MapNode( - MedialNaNs(), - iterfield=["in_file"], - name="medial_nans", + stdev_volume = pe.Node( + fsl.maths.StdImage(dimension='T'), + name="stdev_volume", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - - binarize_volume = pe.Node( - fs.Binarize(min=-10000) + mean_volume = pe.Node( + fsl.maths.MeanImage(dimension='T'), + name="mean_volume", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) - mask_volume = pe.Node( - fs.ApplyMask() + # merge_cov_inputs = pe.Node( + # niu.Merge(1), + # name="merge_cov_inputs", + # mem_gb=DEFAULT_MEMORY_MIN_GB, + # ) + cov_volume = pe.Node( + fsl.maths.BinaryMaths(op_string='-div %s '), + name="cov_volume", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) - ConvertFStoNIFTI = pe.Node( - fs.MRIConvert(ext='.nii') + + cov_ribbon = pe.Node( + fsl.ApplyMask(), + name="cov_ribbon", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) - stdev_volume = pe.Node( - fsl.maths.StdImage(dimension='T') + + cov_ribbon_mean = pe.Node( + fsl.ImageStats(op_string='-M '), + name="cov_mean", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) - mean_volume = pe.Node( - fsl.maths.MeanImage(dimension='T') + cov_ribbon_std = pe.Node( + fsl.ImageStats(op_string='-S '), + name="cov_std", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) - merge_cov_inputs = pe.Node( - niu.Merge(1) + + cov_ribbon_norm = pe.Node( + fsl.maths.BinaryMaths(operation='div'), + name="cov_ribbon_norm", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) - cov_volume = pe.Node( - fsl.maths.MultiImageMaths(op_string='-div %s'), + + smooth_norm = pe.Node( + fsl.maths.MathsCommand(args="-bin -s 5 "), + name="smooth_norm", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) - cov_mean = pe.Node( - fsl.ImageStats(op_string='-M'), + + 
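    # The fslmaths/fslstats nodes above and below implement the HCP-style
    # "goodvoxels" criterion: compute the temporal coefficient of variation
    # (std / mean) of the BOLD series, normalize and smooth it within the
    # cortical ribbon (dividing a smoothed CoV by an equally smoothed binary
    # ribbon mask, so voxels near the ribbon edge are not attenuated), and flag
    # voxels whose value exceeds mean + 0.5 * std of the within-ribbon
    # distribution.  A condensed, self-contained numpy sketch of the same idea
    # follows -- illustrative only; the function and argument names are
    # hypothetical and the smoothing step is omitted here:
    def _goodvoxels_sketch(bold_4d, ribbon_mask):
        """Return a boolean 'good voxels' mask for a 4D BOLD array."""
        import numpy as np

        mean = bold_4d.mean(axis=-1)
        std = bold_4d.std(axis=-1)
        # temporal coefficient of variation, guarding against division by zero
        cov = np.divide(std, mean, out=np.zeros_like(std), where=mean != 0)
        # threshold from the CoV distribution inside the cortical ribbon
        thresh = cov[ribbon_mask].mean() + 0.5 * cov[ribbon_mask].std()
        return (cov <= thresh) & (mean != 0)
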
merge_smooth_norm = pe.Node( + niu.Merge(1), + name="merge_smooth_norm", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) - cov_std = pe.Node( - fsl.ImageStats(op_string='-S'), + + cov_ribbon_norm_smooth = pe.Node( + fsl.maths.MultiImageMaths(args="-s 5 -div %s -dilD "), + name="cov_ribbon_norm_smooth", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) + + cov_norm_modulate = pe.Node( + fsl.maths.MultiImageMaths(op_string='-div %s -div %s '), + name="cov_ribbon_modulate", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + merge_cov_stats = pe.Node( - niu.Merge(2) + niu.Merge(2), + name="merge_cov_stats", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) + cov_norm_modulate_ribbon = pe.Node( + fsl.ApplyMask(), + name="cov_norm_modulate_ribbon", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) def _calc_upper_thr(in_stats): - return in_stats[0] + (in_stats[1] * 4) + return in_stats[0] + (in_stats[1] * 0.5) upper_thr_val = pe.Node( Function( input_names=["in_stats"], output_names=["upper_thresh"], function=_calc_upper_thr - ) + ), + name="upper_thr_val", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) def _calc_lower_thr(in_stats): - return in_stats[0] - (in_stats[1] * 4) + return in_stats[1] - (in_stats[0] * 0.5) lower_thr_val = pe.Node( Function( input_names=["in_stats"], output_names=["lower_thresh"], function=_calc_lower_thr - ) + ), + name="lower_thr_val", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) upper_thr_volume = pe.Node( - fsl.maths.Threshold(direction='below') + fsl.maths.Threshold(direction='below'), + name="upper_thr_volume", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) lower_thr_volume = pe.Node( - fsl.maths.Threshold(direction='above') + fsl.maths.Threshold(direction='above'), + name="lower_thr_volume", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) binarize_upper_volume = pe.Node( - fsl.maths.UnaryMaths(operation="bin") + fsl.maths.UnaryMaths(operation="bin"), + name="binarize_upper_volume", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) binarize_lower_volume = pe.Node( - fsl.maths.UnaryMaths(operation="bin") + fsl.maths.UnaryMaths(operation="bin"), + name="binarize_lower_volume", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) merge_thr_volume = pe.Node( - niu.Merge(1) + niu.Merge(1), + name="merge_thr_volume", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + + mod_ribbon_mean = pe.Node( + fsl.ImageStats(op_string='-M '), + name="mod_ribbon_mean", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + mod_ribbon_std = pe.Node( + fsl.ImageStats(op_string='-S '), + name="mod_ribbon_std", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + + merge_mod_ribbon_stats = pe.Node( + niu.Merge(2), + name="merge_mod_ribbon_stats", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + + merge_goodvoxels_operands = pe.Node( + niu.Merge(2), + name="merge_goodvoxels_operands", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) - filter_for_outliers = pe.Node( - fsl.maths.MultiImageMaths(), - op_string='-sub %s -mul -1' + goodvoxels_mask = pe.Node( + fsl.maths.MultiImageMaths(op_string='-thr %s, -bin, -sub %s -mul, -1'), + name="goodvoxels_mask", + mem_gb=mem_gb, ) ConvertNIFTItoFS = pe.Node( - fs.MRIConvert(ext='.mgz') + fs.MRIConvert(out_type='mgz'), + name="ConvertNIFTItoFS", + mem_gb=mem_gb, ) apply_goodvoxels_mask = pe.Node( - fs.ApplyMask() + fs.ApplyMask(), + name="apply_goodvoxels_mask", + mem_gb=mem_gb * 3, ) - - goodvoxels_sampler = pe.Node( + + goodvoxels_sampler = pe.MapNode( fs.SampleToSurface( cortex_mask=True, interp_method="trilinear", @@ -301,45 +504,73 @@ def _calc_lower_thr(in_stats): name="goodvoxels_sampler", mem_gb=mem_gb * 3, ) - sampler.inputs.hemi = ["lh", "rh"] - + goodvoxels_sampler.inputs.hemi = ["lh", "rh"] # fmt:off workflow.connect([ - (inputnode, medial_nans, [("subjects_dir", 
"subjects_dir")]), - (sampler, medial_nans, [("out_file", "in_file")]), - (medial_nans, sample_medial_wall_to_volume, [("out_file", "source_file")]), - (inputnode, sample_medial_wall_to_volume, [("source_file", "template_file")]), - (inputnode, sample_medial_wall_to_volume, [("subjects_dir", "subjects_dir")]), - (inputnode, sample_medial_wall_to_volume, [("subject_id", "subject_id")]), - (sample_medial_wall_to_volume, binarize_volume, [("out_file", "in_file")]), - (binarize_volume, mask_volume, [("out_file", "mask_file")]), - (inputnode, mask_volume, [("source_file", "in_file")]), - (mask_volume, ConvertFStoNIFTI, [("out_file", "in_file")]), - (ConvertFStoNIFTI, stdev_volume, [("out_file", "in_file")]), - (ConvertFStoNIFTI, mean_volume, [("out_file", "in_file")]), - (stdev_volume, merge_cov_inputs, [("out_file", "in1")]), - (merge_cov_inputs, cov_volume, [("out", "operand_files")]), - (mean_volume, cov_volume, [("out_file", "in_file")]), - (cov_volume, cov_mean, [("out_file", "in_file")]), - (cov_volume, cov_std, [("out_file", "in_file")]), - (cov_mean, merge_cov_stats, [("out_stat", "in1")]), - (cov_std, merge_cov_stats, [("out_stat", "in2")]), - (merge_cov_stats, upper_thr_val, [("out", "in_stats")]), - (merge_cov_stats, lower_thr_val, [("out", "in_stats")]), - (cov_volume, upper_thr_volume, [("out_file", "in_file")]), - (upper_thr_val, upper_thr_volume, [("upper_thresh", "thresh")]), - (cov_volume, lower_thr_volume, [("out_file", "in_file")]), - (lower_thr_val, lower_thr_volume, [("lower_thresh", "thresh")]), - (upper_thr_volume, binarize_upper_volume, [("out_file", "in_file")]), - (lower_thr_volume, binarize_lower_volume, [("out_file", "in_file")]), - (binarize_upper_volume, merge_thr_volume, [("out_file", "in1")]), - (merge_thr_volume, filter_for_outliers, [("out_file", "operand_files")]), - (binarize_lower_volume, filter_for_outliers, [("out_file", "in_file")]), - (filter_for_outliers, ConvertNIFTItoFS, [("out_file", "in_file")]), + # (inputnode, medial_nans, [("subjects_dir", "subjects_dir")]), + # (sampler, medial_nans, [("binary_file", "in_file")]), + # (medial_nans, sample_medial_wall_to_volume, [("out_file", "source_file")]), + # (inputnode, sample_medial_wall_to_volume, [("source_file", "template_file")]), + # (inputnode, sample_medial_wall_to_volume, [("subjects_dir", "subjects_dir")]), + # (inputnode, sample_medial_wall_to_volume, [("subject_id", "subject_id")]), + # (sample_medial_wall_to_volume, binarize_volume, [("out_file", "in_file")]), + # (binarize_volume, mask_volume, [("out_file", "mask_file")]), + # (inputnode, mask_volume, [("source_file", "in_file")]), + # (mask_volume, ConvertFStoNIFTI, [("out_file", "in_file")]), + # (ConvertFStoNIFTI, stdev_volume, [("out_file", "in_file")]), + # (ConvertFStoNIFTI, mean_volume, [("out_file", "in_file")]), + (rename_src, stdev_volume, [("out_file", "in_file")]), + (rename_src, mean_volume, [("out_file", "in_file")]), + (stdev_volume, cov_volume, [("out_file", "in_file")]), + (mean_volume, cov_volume, [("out_file", "operand_file")]), + (cov_volume, cov_ribbon, [("out_file", "in_file")]), + (combine_ribbon_vol_hemis, cov_ribbon, [("out_file", "mask_file")]), + (cov_ribbon, cov_ribbon_mean, [("out_file", "in_file")]), + (cov_ribbon, cov_ribbon_std, [("out_file", "in_file")]), + (cov_ribbon, cov_ribbon_norm, [("out_file", "in_file")]), + (cov_ribbon_mean, cov_ribbon_norm, [("out_file", "operand_file")]), + (cov_ribbon_norm, smooth_norm, [("out_file", "in_file")]), + (smooth_norm, merge_smooth_norm, [("out_file", "in1")]), + 
(cov_ribbon_norm, cov_ribbon_norm_smooth, [("out_file", "in_file")]), + (merge_smooth_norm, cov_ribbon_norm_smooth, [("out", "operand_files")]), + (cov_ribbon_mean, merge_cov_stats, [("out_file", "in1")]), + (cov_ribbon_norm_smooth, merge_cov_stats, [("out_file", "in2")]), + (merge_cov_stats, cov_norm_modulate[("out, operand_files")]), + (cov_volume, cov_norm_modulate, [("out_file, in_file")]), + (cov_norm_modulate, cov_norm_modulate_ribbon, [("out_file, in_file")]), + (combine_ribbon_vol_hemis, cov_norm_modulate_ribbon, [("out, mask_file")]), + (cov_norm_modulate_ribbon, mod_ribbon_mean, [("out_file", "in_file")]), + (cov_norm_modulate_ribbon, mod_ribbon_std, [("out_file", "in_file")]), + (mod_ribbon_mean, merge_mod_ribbon_stats, [("out_stat", "in1")]), + (mod_ribbon_std, merge_mod_ribbon_stats, [("out_stat", "in2")]), + (merge_mod_ribbon_stats, upper_thr_val, [("out, in_stats")]), + (merge_mod_ribbon_stats, lower_thr_val, [("out, in_stats")]), + (upper_thr_val, merge_goodvoxels_operands, [("out_thresh, in1")]), + (inputnode, merge_goodvoxels_operands, [("t1w_mask", "in2")]), + (cov_norm_modulate, goodvoxels_mask, [("out_file, in_file")]), + (merge_goodvoxels_operands, goodvoxels_mask, [("out_file, operand_files")]), + + + + + # (merge_cov_stats, lower_thr_val, [("out", "in_stats")]), + # (cov_ribbon, upper_thr_volume, [("out_file", "in_file")]), + # (upper_thr_val, upper_thr_volume, [("upper_thresh", "thresh")]), + # (cov_ribbon, lower_thr_volume, [("out_file", "in_file")]), + # (lower_thr_val, lower_thr_volume, [("lower_thresh", "thresh")]), + # (upper_thr_volume, binarize_upper_volume, [("out_file", "in_file")]), + # (lower_thr_volume, binarize_lower_volume, [("out_file", "in_file")]), + # (binarize_upper_volume, merge_thr_volume, [("out_file", "in1")]), + # (merge_thr_volume, filter_for_outliers, [("out_file", "operand_files")]), + # (binarize_lower_volume, filter_for_outliers, [("out_file", "in_file")]), + # (filter_for_outliers, ConvertNIFTItoFS, [("out_file", "in_file")]), + (goodvoxels_mask, ConvertNIFTItoFS, [("out_file", "in_file")]), (ConvertNIFTItoFS, apply_goodvoxels_mask, [("out_file", "mask_file")]), - (mask_volume, apply_goodvoxels_mask, [("out_file", "in_file")]), - (apply_goodvoxels_mask, medial_nans, [("out_file", "in_file")]), - (goodvoxels_sampler, update_metadata, [("out_file", "in_file")]) + (rename_src, apply_goodvoxels_mask, [("out_file", "in_file")]), + (apply_goodvoxels_mask, sampler, [("out_file", "in_file")]), + (sampler, update_metadata, [("out_file", "in_file")]), + (update_metadata, outputnode, [("out_file", "surfaces")]), + (itersource, outputnode, [("target", "target")]), ]) # fmt:on return workflow From 3041e25f149fd6c8e7af6089e6d64b21de835d98 Mon Sep 17 00:00:00 2001 From: madisoth <56737848+madisoth@users.noreply.github.com> Date: Wed, 14 Sep 2022 13:55:58 -0700 Subject: [PATCH 06/14] progress on workbench / gifti surface wf --- docs/workflows.rst | 15 +- fmriprep/cli/parser.py | 18 + fmriprep/config.py | 5 + fmriprep/data/tests/config.toml | 2 + fmriprep/interfaces/metric.py | 305 ++++++++++++++++ fmriprep/interfaces/sphereproject.py | 69 ++++ fmriprep/interfaces/volume.py | 165 +++++++++ fmriprep/workflows/bold/base.py | 8 +- fmriprep/workflows/bold/resampling.py | 504 ++++++++++++++++---------- 9 files changed, 893 insertions(+), 198 deletions(-) create mode 100644 fmriprep/interfaces/metric.py create mode 100644 fmriprep/interfaces/sphereproject.py create mode 100644 fmriprep/interfaces/volume.py diff --git a/docs/workflows.rst b/docs/workflows.rst 
index 2cd26c867..5b42bce9e 100644
--- a/docs/workflows.rst
+++ b/docs/workflows.rst
@@ -478,7 +478,9 @@ EPI sampled to FreeSurfer surfaces
         wf = init_bold_surf_wf(
             mem_gb=1,
             surface_spaces=['fsnative', 'fsaverage5'],
-            medial_surface_nan=False)
+            medial_surface_nan=False,
+            project_goodvoxels=False,
+            surface_sampler="fs")
 
 If FreeSurfer processing is enabled, the motion-corrected functional series
 (after single shot resampling to T1w space) is sampled to the
@@ -486,8 +488,19 @@ surface by averaging across the cortical ribbon.
 Specifically, at each vertex, the segment normal to the white-matter
 surface, extending to the pial surface, is sampled at 6 intervals and
 averaged.
 
+With ``--surface-sampler wb``, Workbench ``wb_command -volume-to-surface-mapping``
+(with ``-ribbon-constrained``) is used for surface sampling instead of the default
+FreeSurfer method described above.
+
 Surfaces are generated for the "subject native" surface, as well as
 transformed to the ``fsaverage`` template space.
+
+The flag ``--project-goodvoxels``, when enabled, excludes voxels whose time series
+have a locally high coefficient of variation from the surface sampling, following a
+procedure similar to that of the `HCP Pipelines`_.
+
 All surface outputs are in GIFTI format.
 
 HCP Grayordinates
diff --git a/fmriprep/cli/parser.py b/fmriprep/cli/parser.py
index f4afaa843..9def95db5 100644
--- a/fmriprep/cli/parser.py
+++ b/fmriprep/cli/parser.py
@@ -363,6 +363,24 @@ def _slice_time_ref(value, parser):
         help="Replace medial wall values with NaNs on functional GIFTI files. Only "
         "performed for GIFTI files mapped to a freesurfer subject (fsaverage or fsnative).",
     )
+    g_conf.add_argument(
+        "--project-goodvoxels",
+        required=False,
+        action="store_true",
+        default=False,
+        help="Exclude voxels whose timeseries have locally high coefficient of variation "
+        "from surface resampling. Only performed for GIFTI files mapped to a freesurfer subject "
+        "(fsaverage or fsnative).",
+    )
+    g_conf.add_argument(
+        "--surface-sampler",
+        action="store",
+        choices=("fs", "wb"),
+        default="fs",
+        help="Select the sampling method used to project the BOLD timeseries to the surface. "
+        "'fs' to use FreeSurfer mri_vol2surf (default). 
" + "'wb' to use Workbench wb_command -volume-to-surface-mapping.", + ) g_conf.add_argument( "--slice-time-ref", required=False, diff --git a/fmriprep/config.py b/fmriprep/config.py index e17ab61d1..68e87318c 100644 --- a/fmriprep/config.py +++ b/fmriprep/config.py @@ -546,6 +546,11 @@ class workflow(_Config): """Run FreeSurfer ``recon-all`` with the ``-logitudinal`` flag.""" medial_surface_nan = None """Fill medial surface with :abbr:`NaNs (not-a-number)` when sampling.""" + project_goodvoxels = False + """Exclude voxels with locally high coefficient of variation from sampling.""" + surface_sampler = "fs" + """``fs`` (default) or ``wb`` to select FreeSurfer-based or Workbench-based + method for sampling BOLD to surface.""" regressors_all_comps = None """Return all CompCor components.""" regressors_dvars_th = None diff --git a/fmriprep/data/tests/config.toml b/fmriprep/data/tests/config.toml index 7c525cda6..097a974d0 100644 --- a/fmriprep/data/tests/config.toml +++ b/fmriprep/data/tests/config.toml @@ -39,6 +39,8 @@ hires = true ignore = [] longitudinal = false medial_surface_nan = false +project_goodvoxels = false +surface_sampler = "fs" regressors_all_comps = false regressors_dvars_th = 1.5 regressors_fd_th = 0.5 diff --git a/fmriprep/interfaces/metric.py b/fmriprep/interfaces/metric.py new file mode 100644 index 000000000..16d699c43 --- /dev/null +++ b/fmriprep/interfaces/metric.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""This module provides interfaces for workbench surface commands""" +import os + +from nipype.interfaces.base import TraitedSpec, File, traits, CommandLineInputSpec +from nipype.interfaces.workbench.base import WBCommand +from nipype import logging + +iflogger = logging.getLogger("nipype.interface") + + +class MetricDilateInputSpec(CommandLineInputSpec): + + in_file = File( + exists=True, + mandatory=True, + argstr="%s ", + position=0, + desc="the metric to dilate", + ) + + surface = File( + exists=True, + mandatory=True, + argstr="%s ", + position=1, + desc="the surface to compute on", + ) + + distance = traits.Float( + mandatory=True, + argstr="%f ", + position=2, + desc="distance in mm to dilate", + ) + + out_file = File( + name_source=["in_file"], + name_template="%s.func.gii", + keep_extension=False, + argstr="%s ", + position=3, + desc="output - the output metric", + ) + + bad_vertex_roi = File( + argstr="-bad-vertex-roi %s ", + position=4, + desc="metric file, positive values denote vertices to have their values replaced", + ) + + data_roi = File( + argstr="-data-roi %s ", + position=5, + desc="metric file, positive values denote vertices that have data", + ) + + column = traits.Int( + position=6, + argstr="-column %d ", + desc="the column number", + ) + + nearest = traits.Bool( + position=7, + argstr="-nearest ", + desc="use the nearest good value instead of a weighted average", + ) + + linear = traits.Bool( + position=8, + argstr="-linear ", + desc="fill in values with linear interpolation along strongest gradient", + ) + + exponent = traits.Float( + argstr="-exponent %f ", + position=9, + default=6.0, + desc="exponent n to use in (area / (distance ^ n)) as the " + "weighting function (default 6)", + ) + + corrected_areas = File( + argstr="-corrected-areas %s ", + position=10, + desc="vertex areas to use instead of computing them from the surface", + ) + + legacy_cutoff = traits.Bool( + position=11, + argstr="-legacy-cutoff ", + desc="use 
the v1.3.2 method of choosing how many vertices to " + "use when calulating the dilated value with weighted method", + ) + + +class MetricDilateOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="output file") + + +class MetricDilate(WBCommand): + """ + For all data values designated as bad, if they neighbor a good value or + are within the specified distance of a good value in the same kind of + model, replace the value with a distance weighted average of nearby good + values, otherwise set the value to zero. If -nearest is specified, it + will use the value from the closest good value within range instead of a + weighted average. When the input file contains label data, nearest + dilation is used on the surface, and weighted popularity is used in the + volume. + + The -corrected-areas options are intended for dilating on group average + surfaces, but it is only an approximate correction for the reduction of + structure in a group average surface. + + If -bad-vertex-roi is specified, all values, including those with + value zero, are good, except for locations with a positive value in the + ROI. If it is not specified, only values equal to zero are bad. + + >>> from nipype.interfaces.workbench import MetricDilate + >>> metdil = MetricDilate() + >>> metdil.inputs.in_file = 'sub-01_task-rest_bold_space-fsaverage5.L.func.gii' + >>> metdil.inputs.surface = 'sub-01.L.midthickness.surf.gii' + >>> metdil.inputs.distance = 10 + >>> metdil.inputs.nearest = True + >>> metdil.cmdline + 'wb_command -metric-dilate \ + sub-01_task-rest_bold_space-fsaverage5.L.func.gii \ + sub-01.L.midthickness.surf.gii \ + 10 \ + sub-01_task-rest_bold_space-fsaverage5.L.func.gii \ + -nearest' + """ + input_spec = MetricDilateInputSpec + output_spec = MetricDilateOutputSpec + _cmd = "wb_command -metric-dilate " + + +class MetricResampleInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + mandatory=True, + argstr="%s", + position=0, + desc="The metric file to resample", + ) + current_sphere = File( + exists=True, + mandatory=True, + argstr="%s", + position=1, + desc="A sphere surface with the mesh that the metric is currently on", + ) + new_sphere = File( + exists=True, + mandatory=True, + argstr="%s", + position=2, + desc="A sphere surface that is in register with and" + " has the desired output mesh", + ) + method = traits.Enum( + "ADAP_BARY_AREA", + "BARYCENTRIC", + argstr="%s", + mandatory=True, + position=3, + desc="The method name - ADAP_BARY_AREA method is recommended for" + " ordinary metric data, because it should use all data while" + " downsampling, unlike BARYCENTRIC. 
If ADAP_BARY_AREA is used," + " exactly one of area_surfs or area_metrics must be specified", + ) + out_file = File( + name_source=["new_sphere"], + name_template="%s.out", + keep_extension=True, + argstr="%s", + position=4, + desc="The output metric", + ) + area_surfs = traits.Bool( + position=5, + argstr="-area-surfs", + xor=["area_metrics"], + desc="Specify surfaces to do vertex area correction based on", + ) + area_metrics = traits.Bool( + position=5, + argstr="-area-metrics", + xor=["area_surfs"], + desc="Specify vertex area metrics to do area correction based on", + ) + current_area = File( + exists=True, + position=6, + argstr="%s", + desc="A relevant anatomical surface with mesh OR" + " a metric file with vertex areas for mesh", + ) + new_area = File( + exists=True, + position=7, + argstr="%s", + desc="A relevant anatomical surface with mesh OR" + " a metric file with vertex areas for mesh", + ) + roi_metric = File( + exists=True, + position=8, + argstr="-current-roi %s", + desc="Input roi on the current mesh used to exclude non-data vertices", + ) + valid_roi_out = traits.Bool( + position=9, + argstr="-valid-roi-out", + desc="Output the ROI of vertices that got data from valid source vertices", + ) + largest = traits.Bool( + position=10, + argstr="-largest", + desc="Use only the value of the vertex with the largest weight", + ) + + +class MetricResampleOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="the output metric") + roi_file = File(desc="ROI of vertices that got data from valid source vertices") + + +class MetricResample(WBCommand): + """ + Resample a metric file to a different mesh + + Resamples a metric file, given two spherical surfaces that are in + register. If ``ADAP_BARY_AREA`` is used, exactly one of -area-surfs or + ``-area-metrics`` must be specified. + + The ``ADAP_BARY_AREA`` method is recommended for ordinary metric data, + because it should use all data while downsampling, unlike ``BARYCENTRIC``. + The recommended areas option for most data is individual midthicknesses + for individual data, and averaged vertex area metrics from individual + midthicknesses for group average data. + + The ``-current-roi`` option only masks the input, the output may be slightly + dilated in comparison, consider using ``-metric-mask`` on the output when + using ``-current-roi``. + + The ``-largest option`` results in nearest vertex behavior when used with + ``BARYCENTRIC``. When resampling a binary metric, consider thresholding at + 0.5 after resampling rather than using ``-largest``. 
+ + >>> from nipype.interfaces.workbench import MetricResample + >>> metres = MetricResample() + >>> metres.inputs.in_file = 'sub-01_task-rest_bold_space-fsaverage5.L.func.gii' + >>> metres.inputs.method = 'ADAP_BARY_AREA' + >>> metres.inputs.current_sphere = 'fsaverage5_std_sphere.L.10k_fsavg_L.surf.gii' + >>> metres.inputs.new_sphere = 'fs_LR-deformed_to-fsaverage.L.sphere.32k_fs_LR.surf.gii' + >>> metres.inputs.area_metrics = True + >>> metres.inputs.current_area = 'fsaverage5.L.midthickness_va_avg.10k_fsavg_L.shape.gii' + >>> metres.inputs.new_area = 'fs_LR.L.midthickness_va_avg.32k_fs_LR.shape.gii' + >>> metres.cmdline + 'wb_command -metric-resample sub-01_task-rest_bold_space-fsaverage5.L.func.gii \ + fsaverage5_std_sphere.L.10k_fsavg_L.surf.gii \ + fs_LR-deformed_to-fsaverage.L.sphere.32k_fs_LR.surf.gii \ + ADAP_BARY_AREA fs_LR-deformed_to-fsaverage.L.sphere.32k_fs_LR.surf.out \ + -area-metrics fsaverage5.L.midthickness_va_avg.10k_fsavg_L.shape.gii \ + fs_LR.L.midthickness_va_avg.32k_fs_LR.shape.gii' + """ + + input_spec = MetricResampleInputSpec + output_spec = MetricResampleOutputSpec + _cmd = "wb_command -metric-resample" + + def _format_arg(self, opt, spec, val): + if opt in ["current_area", "new_area"]: + if not self.inputs.area_surfs and not self.inputs.area_metrics: + raise ValueError( + "{} was set but neither area_surfs or" + " area_metrics were set".format(opt) + ) + if opt == "method": + if ( + val == "ADAP_BARY_AREA" + and not self.inputs.area_surfs + and not self.inputs.area_metrics + ): + raise ValueError( + "Exactly one of area_surfs or area_metrics" " must be specified" + ) + if opt == "valid_roi_out" and val: + # generate a filename and add it to argstr + roi_out = self._gen_filename(self.inputs.in_file, suffix="_roi") + iflogger.info("Setting roi output file as", roi_out) + spec.argstr += " " + roi_out + return super(MetricResample, self)._format_arg(opt, spec, val) + + def _list_outputs(self): + outputs = super(MetricResample, self)._list_outputs() + if self.inputs.valid_roi_out: + roi_file = self._gen_filename(self.inputs.in_file, suffix="_roi") + outputs["roi_file"] = os.path.abspath(roi_file) + return outputs diff --git a/fmriprep/interfaces/sphereproject.py b/fmriprep/interfaces/sphereproject.py new file mode 100644 index 000000000..b2348238c --- /dev/null +++ b/fmriprep/interfaces/sphereproject.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""This module provides interfaces for workbench sphere projection commands""" + +# from distutils.cmd import Command +from signal import valid_signals +from nipype.interfaces.workbench.base import WBCommand +from nipype.interfaces.base import ( + TraitedSpec, + File, + traits, + CommandLineInputSpec, + SimpleInterface, +) +from nipype import logging + + +class SurfaceSphereProjectUnprojectInputSpec(CommandLineInputSpec): + + in_file = File( + exists=True, + mandatory=True, + argstr="%s ", + position=0, + desc="a sphere with the desired output mesh", + ) + + sphere_project_to = File( + exists=True, + mandatory=True, + argstr="%s ", + position=1, + desc="a sphere that aligns with sphere-in", + ) + + sphere_unproject_from = File( + exists=True, + mandatory=True, + argstr="%s ", + position=2, + desc="deformed to the desired output space", + ) + + out_file = File( + name_source="in_file", + name_template="%s_deformed.surf.gii ", + keep_extension=False, + argstr="%s", + position=3, + desc="The sphere output file", 
+ ) + + +class SurfaceSphereProjectUnprojectOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="output file") + + +class SurfaceSphereProjectUnproject(WBCommand): + # COPY REGISTRATION DEFORMATIONS TO DIFFERENT SPHERE + # wb_command -surface-sphere-project-unproject + # - a sphere with the desired output mesh + # - a sphere that aligns with sphere-in + # - deformed to the desired output space + # - output - the output sphere + + input_spec = SurfaceSphereProjectUnprojectInputSpec + output_spec = SurfaceSphereProjectUnprojectOutputSpec + _cmd = "wb_command -surface-sphere-project-unproject " \ No newline at end of file diff --git a/fmriprep/interfaces/volume.py b/fmriprep/interfaces/volume.py new file mode 100644 index 000000000..6d8e19386 --- /dev/null +++ b/fmriprep/interfaces/volume.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""This module provides interfaces for workbench volume commands""" +import os + +from nipype.interfaces.base import TraitedSpec, File, traits, CommandLineInputSpec +from nipype.interfaces.workbench.base import WBCommand +from nipype import logging + +iflogger = logging.getLogger("nipype.interface") + + +class CreateSignedDistanceVolumeInputSpec(CommandLineInputSpec): + surface = File( + exists=True, + mandatory=True, + argstr="%s ", + position=0, + desc="the input surface", + ) + ref_space = File( + exists=True, + mandatory=True, + argstr="%s ", + position=1, + desc="a volume in the desired output space (dims, spacing, origin)", + ) + out_vol = File( + name_source=["surface"], + name_template="%s.distvol.nii.gz", + argstr="%s ", + position=2, + desc="output - the output volume", + ) + roi_out = File( + name_source=["surface"], + name_template="%s.distvolroi.nii.gz", + argstr="-roi-out %s ", + position=3, + desc="output roi volume of where the output has a computed value", + ) + fill_value = traits.Float( + mandatory=False, + argstr="-fill-value %f ", + position=4, + desc="specify a value to put in all voxels that don't get assigned a distance, default 0", + ) + exact_limit = traits.Float( + mandatory=False, + argstr="-exact-limit %f ", + position=5, + desc="specify distance for exact output in mm, default 5", + ) + approx_limit = traits.Float( + mandatory=False, + argstr="-approx-limit %f ", + position=6, + desc="specify distance for approximate output in mm, default 20", + ) + approx_neighborhood = traits.Int( + mandatory=False, + argstr="-approx-neighborhood %d ", + position=7, + desc="size of neighborhood cube measured from center to face, default 2 = 5x5x5", + ) + winding_method = traits.Enum( + "EVEN_ODD", + "NEGATIVE", + "NONZERO", + "NORMALS", + argstr="-winding %s ", + usedefault=True, + position=8, + desc="winding method for point inside surface test, choices: " + "EVEN_ODD (default) " + "NEGATIVE " + "NONZERO " + "NORMALS " + ) + + +class CreateSignedDistanceVolumeOutputSpec(TraitedSpec): + out_vol = File(exists=True, desc="output - the output volume") + roi_out = File(desc="output roi volume of where the output has a computed value") + + +class CreateSignedDistanceVolume(WBCommand): + """ +CREATE SIGNED DISTANCE VOLUME FROM SURFACE + wb_command -create-signed-distance-volume + - the input surface + - a volume in the desired output space (dims, spacing, origin) + - output - the output volume + + [-roi-out] - output an roi volume of where the output has a computed + value + - output - the output roi volume + + [-fill-value] - 
specify a value to put in all voxels that don't get + assigned a distance + - value to fill with (default 0) + + [-exact-limit] - specify distance for exact output + - distance in mm (default 5) + + [-approx-limit] - specify distance for approximate output + - distance in mm (default 20) + + [-approx-neighborhood] - voxel neighborhood for approximate calculation + - size of neighborhood cube measured from center to face, in + voxels (default 2 = 5x5x5) + + [-winding] - winding method for point inside surface test + - name of the method (default EVEN_ODD) + + Computes the signed distance function of the surface. Exact distance is + calculated by finding the closest point on any surface triangle to the + center of the voxel. Approximate distance is calculated starting with + these distances, using dijkstra's method with a neighborhood of voxels. + Specifying too small of an exact distance may produce unexpected results. + Valid specifiers for winding methods are as follows: + + EVEN_ODD (default) + NEGATIVE + NONZERO + NORMALS + + The NORMALS method uses the normals of triangles and edges, or the + closest triangle hit by a ray from the point. This method may be + slightly faster, but is only reliable for a closed surface that does not + cross through itself. All other methods count entry (positive) and exit + (negative) crossings of a vertical ray from the point, then counts as + inside if the total is odd, negative, or nonzero, respectively. + + >>> from nipype.interfaces.workbench import CreateSignedDistanceVolume + >>> distvol = CreateSignedDistanceVolume() + >>> distvol.inputs.surface = 'sub-01.L.pial.native.surf.gii' + >>> distvol.inputs.refspace = 'sub-01_T1w.nii.gz' + >>> distvol.inputs.out_vol = 'sub-01.L.pial.native.surf.distvol.nii.gz' + >>> distvol.inputs.roi_out = 'sub-01.L.pial.native.surf.distvolroi.nii.gz' + >>> distvol.inputs.fill_value = 0 + >>> distvol.inputs.exact_limit = 5 + >>> distvol.inputs.approx_limit = 20 + >>> distvol.inputs.approx_neighborhood = 2 + >>> distvol.inputs.winding_method = 'EVEN_ODD' + >>> distvol.cmdline + 'wb_command -create-signed-distance-volume sub-01.L.pial.native.surf.gii \ + sub-01_T1w.nii.gz \ + sub-01.L.pial.native.surf.distvol.nii.gz \ + -roi-out sub-01.L.pial.native.surf.distvolroi.nii.gz \ + -fill-value 0 \ + -exact-limit 5 \ + -approx-limit 20 \ + -approx-neighborhood 2 \ + -winding EVEN_ODD' + """ + + input_spec = CreateSignedDistanceVolumeInputSpec + output_spec = CreateSignedDistanceVolumeOutputSpec + _cmd = "wb_command -create-signed-distance-volume" + + def _list_outputs(self): + outputs = super(CreateSignedDistanceVolume, self)._list_outputs() + return outputs diff --git a/fmriprep/workflows/bold/base.py b/fmriprep/workflows/bold/base.py index 9b2055ae6..60900cb91 100644 --- a/fmriprep/workflows/bold/base.py +++ b/fmriprep/workflows/bold/base.py @@ -351,7 +351,7 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False): "anat2std_xfm", "std2anat_xfm", "template", - "anat_giftis" + "anat_giftis", "t1w2fsnative_xfm", "fsnative2t1w_xfm", "fmap", @@ -947,6 +947,8 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False): mem_gb=mem_gb["resampled"], surface_spaces=freesurfer_spaces, medial_surface_nan=config.workflow.medial_surface_nan, + project_goodvoxels=config.workflow.project_goodvoxels, + surface_sampler=config.workflow.surface_sampler, name="bold_surf_wf", ) # fmt:off @@ -956,8 +958,10 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False): ("subject_id", "inputnode.subject_id"), ("t1w2fsnative_xfm", 
"inputnode.t1w2fsnative_xfm"), ("anat_giftis", "inputnode.anat_giftis"), - ("t1w_mask", "inputnode.t1w_mask") + ("t1w_mask", "inputnode.t1w_mask"), ]), + (bold_reg_wf, bold_surf_wf, [("outputnode.itk_bold_to_t1", + "inputnode.itk_bold_to_t1")]), (bold_t1_trans_wf, bold_surf_wf, [("outputnode.bold_t1", "inputnode.source_file")]), (bold_surf_wf, outputnode, [("outputnode.surfaces", "surfaces")]), (bold_surf_wf, func_derivatives_wf, [("outputnode.target", "inputnode.surf_refs")]), diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index 6dbad6c5b..f3a3f828e 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -36,14 +36,31 @@ from nipype.pipeline import engine as pe from nipype.interfaces import utility as niu, freesurfer as fs, fsl import nipype.interfaces.workbench as wb +from niworkflows.interfaces.freesurfer import MakeMidthickness +from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms +from ...interfaces.volume import CreateSignedDistanceVolume +from ...interfaces.metric import MetricDilate -def init_bold_surf_wf(mem_gb, surface_spaces, medial_surface_nan, name="bold_surf_wf"): + +def init_bold_surf_wf(mem_gb, + surface_spaces, + medial_surface_nan, + project_goodvoxels, + name="bold_surf_wf"): """ Sample functional images to FreeSurfer surfaces. For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart) and averaged. + + If --surface-sampler wb is used, Workbench's wb_command -volume-to-surface-mapping + with -ribbon-constrained option is used instead of the default FreeSurfer mri_vol2surf. + Note that unlike HCP, no additional spatial smoothing is applied to the surface-projected + data. + + If --project-goodvoxels is used, a "goodvoxels" BOLD mask, as described in [@hcppipelines], + is generated and applied to the functional image before sampling to surface. Outputs are in GIFTI format. Workflow Graph @@ -53,9 +70,11 @@ def init_bold_surf_wf(mem_gb, surface_spaces, medial_surface_nan, name="bold_sur from fmriprep.workflows.bold import init_bold_surf_wf wf = init_bold_surf_wf(mem_gb=0.1, - surface_spaces=["fsnative", "fsaverage5"], - medial_surface_nan=False) - + surface_spaces=['fsnative', 'fsaverage5'], + medial_surface_nan=False, + project_goodvoxels=False, + surface_sampler="fs") + Parameters ---------- surface_spaces : :obj:`list` @@ -65,19 +84,29 @@ def init_bold_surf_wf(mem_gb, surface_spaces, medial_surface_nan, name="bold_sur native surface. medial_surface_nan : :obj:`bool` Replace medial wall values with NaNs on functional GIFTI files + project_goodvoxels : :obj:`bool` + Exclude voxels with locally high coefficient of variation, or that lie outside the + cortical surfaces, from the surface projection. 
+ surface_sampler : :obj:`str` + 'fs' (default) or 'wb' to specify FreeSurfer-based or Workbench-based + volume to surface mapping Inputs ------ source_file Motion-corrected BOLD series in T1 space - t1w_preproc - Bias-corrected structural template image + t1w_mask + Mask of the skull-stripped T1w image subjects_dir FreeSurfer SUBJECTS_DIR subject_id FreeSurfer subject ID t1w2fsnative_xfm LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space + itk_bold_to_t1 + Affine transform from ``ref_bold_brain`` to T1 space (ITK format) + anat_giftis + GIFTI anatomical surfaces in T1w space Outputs ------- @@ -86,11 +115,9 @@ def init_bold_surf_wf(mem_gb, surface_spaces, medial_surface_nan, name="bold_sur """ from nipype.interfaces.io import FreeSurferSource - from nipype.interfaces.workbench import CreateSignedDistanceVolume from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.surf import GiftiSetAnatomicalStructure - workflow = Workflow(name=name) workflow.__desc__ = """\ The BOLD time-series were resampled onto the following surfaces @@ -100,17 +127,27 @@ def init_bold_surf_wf(mem_gb, surface_spaces, medial_surface_nan, name="bold_sur out_spaces=", ".join(["*%s*" % s for s in surface_spaces]) ) + if project_goodvoxels: + workflow.__desc__ += """\ +Before resampling, a "goodvoxels" mask [@hcppipelines] was applied, +excluding voxels whose time-series have a locally high coefficient of +variation, or that lie outside the cortical surfaces, +from the surface projection. +""" + inputnode = pe.Node( niu.IdentityInterface( fields=["source_file", "subject_id", "subjects_dir", "t1w2fsnative_xfm", + "itk_bold_to_t1", "anat_giftis", "t1w_mask"] ), name="inputnode", ) + itersource = pe.Node(niu.IdentityInterface(fields=["target"]), name="itersource") itersource.iterables = [("target", surface_spaces)] @@ -136,9 +173,11 @@ def select_target(subject_id, space): run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB, ) + itk2lta = pe.Node( niu.Function(function=_itk2lta), name="itk2lta", run_without_submitting=True ) + sampler = pe.MapNode( fs.SampleToSurface( cortex_mask=True, @@ -149,18 +188,15 @@ def select_target(subject_id, space): sampling_range=(0, 1, 0.2), sampling_units="frac", ), + name_source=['source_file'], + keep_extension=False, + name_template='%s.func.gii', iterfield=["hemi"], name="sampler", mem_gb=mem_gb * 3, ) sampler.inputs.hemi = ["lh", "rh"] - sample_medial_wall_to_volume = pe.MapNode( - fs.Surface2VolTransform(), - iterfield=["hemi", "source_file"], - name="sample_medial_wall_to_volume", - mem_gb=mem_gb * 3, - ) - sample_medial_wall_to_volume.inputs.hemi = ["lh", "rh"] + update_metadata = pe.MapNode( GiftiSetAnatomicalStructure(), iterfield=["in_file"], @@ -173,29 +209,75 @@ def select_target(subject_id, space): joinsource="itersource", name="outputnode", ) + + if not project_goodvoxels: + # fmt: off + workflow.connect([ + (inputnode, get_fsnative, [('subject_id', 'subject_id'), + ('subjects_dir', 'subjects_dir')]), + (inputnode, targets, [('subject_id', 'subject_id')]), + (inputnode, rename_src, [('source_file', 'in_file')]), + (inputnode, itk2lta, [('source_file', 'src_file'), + ('t1w2fsnative_xfm', 'in_file')]), + (get_fsnative, itk2lta, [('T1', 'dst_file')]), + (inputnode, sampler, [('subjects_dir', 'subjects_dir'), + ('subject_id', 'subject_id')]), + (itersource, targets, [('target', 'space')]), + (itersource, rename_src, [('target', 'subject')]), + (itk2lta, sampler, [('out', 'reg_file')]), + (targets, 
sampler, [('out', 'target_subject')]), + (rename_src, sampler, [('out_file', 'source_file')]), + (update_metadata, outputnode, [('out_file', 'surfaces')]), + (itersource, outputnode, [('target', 'target')]), + ]) + # fmt: on - # [0, 1, 2, 3] = lh wm, rh wm, lh pial, rh pial + if not medial_surface_nan: + workflow.connect(sampler, "out_file", update_metadata, "in_file") + return workflow + + from niworkflows.interfaces.freesurfer import MedialNaNs + + # Refine if medial vertices should be NaNs + medial_nans = pe.MapNode( + MedialNaNs(), iterfield=["in_file"], name="medial_nans", mem_gb=DEFAULT_MEMORY_MIN_GB + ) + + # fmt: off + workflow.connect([ + (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]), + (sampler, medial_nans, [('out_file', 'in_file')]), + (medial_nans, update_metadata, [('out_file', 'in_file')]), + ]) + # fmt: on + return workflow + + # 0, 1, 2, 3, 6, 7 = lh wm, rh wm, lh pial, rh pial, lh mid, rh mid select_wm = pe.Node( - niu.Select(), - index=[0, 1], - name="select_wm_pial", + niu.Select(index=[0, 1]), + name="select_wm", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - + select_pial = pe.Node( - niu.Select(), - index=[2, 3], + niu.Select(index=[2, 3]), name="select_pial", mem_gb=DEFAULT_MEMORY_MIN_GB, ) + select_midthick = pe.Node( + niu.Select(index=[6, 7]), + name="select_midthick", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + create_wm_distvol = pe.MapNode( CreateSignedDistanceVolume(), iterfield=["surface"], name="create_wm_distvol", mem_gb=mem_gb, ) - + create_pial_distvol = pe.MapNode( CreateSignedDistanceVolume(), iterfield=["surface"], @@ -213,17 +295,17 @@ def select_target(subject_id, space): uthresh_pial_distvol = pe.MapNode( fsl.maths.MathsCommand(args="-uthr 0 -abs -bin -mul 255"), iterfield=["in_file"], - name="thresh_pial_distvol", + name="uthresh_pial_distvol", mem_gb=DEFAULT_MEMORY_MIN_GB, ) bin_wm_distvol = pe.MapNode( fsl.maths.UnaryMaths(operation="bin"), iterfield=["in_file"], - name="bin_threshed_distvol", + name="bin_wm_distvol", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - + bin_pial_distvol = pe.MapNode( fsl.maths.UnaryMaths(operation="bin"), iterfield=["in_file"], @@ -236,6 +318,7 @@ def select_target(subject_id, space): name="split_wm_distvol", mem_gb=DEFAULT_MEMORY_MIN_GB, ) + merge_wm_distvol_no_flatten = pe.Node( niu.Merge(2), no_flatten=True, @@ -244,147 +327,107 @@ def select_target(subject_id, space): ) make_ribbon_vol = pe.MapNode( - fsl.maths.MultiImageMaths(op_string="-mas %s -mul 255"), + fsl.maths.MultiImageMaths(op_string="-mas %s -mul 255 "), iterfield=["in_file", "operand_files"], name="make_ribbon_vol", mem_gb=DEFAULT_MEMORY_MIN_GB, ) + bin_ribbon_vol = pe.MapNode( fsl.maths.UnaryMaths(operation="bin"), iterfield=["in_file"], name="bin_ribbon_vol", mem_gb=DEFAULT_MEMORY_MIN_GB, ) + split_squeeze_ribbon_vol = pe.Node( niu.Split(splits=[1, 1], squeeze=True), name="split_squeeze_ribbon", mem_gb=DEFAULT_MEMORY_MIN_GB, ) + combine_ribbon_vol_hemis = pe.Node( fsl.maths.BinaryMaths(operation="add"), name="combine_ribbon_vol_hemis", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - # fmt:off - workflow.connect([ - (inputnode, select_wm, [("anat_giftis", "inlist")]), - (inputnode, select_pial, [("anat_giftis", "inlist")]), - (select_wm, create_wm_distvol, [("out", "surface")]), - (inputnode, create_wm_distvol, [("t1w_mask", "ref_space")]), - (select_pial, create_pial_distvol, [("out", "surface")]), - (inputnode, create_pial_distvol, [("t1w_mask", "ref_space")]), - (create_wm_distvol, thresh_wm_distvol, [("out_vol", "in_file")]), - (create_pial_distvol, uthresh_pial_distvol, 
[("out_vol", "in_file")]), - (thresh_wm_distvol, bin_wm_distvol, [("t1w_mask", "ref_space")]), - (uthresh_pial_distvol, bin_pial_distvol, [("out_vol", "in_file")]), - (bin_wm_distvol, split_wm_distvol, [("out_vol", "inlist")]), - (split_wm_distvol, merge_wm_distvol_no_flatten, [("out1", "in1")]), - (split_wm_distvol, merge_wm_distvol_no_flatten, [("out2", "in2")]), - (bin_pial_distvol, make_ribbon_vol, [("out_file", "in_file")]), - (merge_wm_distvol_no_flatten, make_ribbon_vol, [("out", "operand_files")]), - (make_ribbon_vol, bin_ribbon_vol, [("out_file", "in_file")]), - (bin_ribbon_vol, split_squeeze_ribbon_vol, "out_file", "inlist"), - (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out1", "in_file")]) - (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out2", "operand_file")]), - ]) - workflow.connect([ - (inputnode, get_fsnative, [("subject_id", "subject_id"), - ("subjects_dir", "subjects_dir")]), - (inputnode, targets, [("subject_id", "subject_id")]), - (inputnode, rename_src, [("source_file", "in_file")]), - (inputnode, itk2lta, [("source_file", "src_file"), - ("t1w2fsnative_xfm", "in_file")]), - (get_fsnative, itk2lta, [("T1", "dst_file")]), - (inputnode, sampler, [("subjects_dir", "subjects_dir"), - ("subject_id", "subject_id")]), - (itersource, targets, [("target", "space")]), - (itersource, rename_src, [("target", "subject")]), - (itk2lta, sampler, [("out", "reg_file")]), - (targets, sampler, [("out", "target_subject")]), - # (rename_src, sampler, [("out_file", "source_file")]), - # (update_metadata, outputnode, [("out_file", "surfaces")]), - # (itersource, outputnode, [("target", "target")]), - ]) - # fmt:on - if not medial_surface_nan: - workflow.connect(sampler, "out_file", update_metadata, "in_file") - return workflow - - from niworkflows.interfaces.freesurfer import MedialNaNs + ribbon_boldsrc_xfm = pe.Node( + ApplyTransforms(interpolation='MultiLabel', + transforms='identity'), + name="ribbon_boldsrc_xfm", + mem_gb=mem_gb, + ) - # Refine if medial vertices should be NaNs stdev_volume = pe.Node( fsl.maths.StdImage(dimension='T'), name="stdev_volume", mem_gb=DEFAULT_MEMORY_MIN_GB, ) + mean_volume = pe.Node( fsl.maths.MeanImage(dimension='T'), name="mean_volume", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - # merge_cov_inputs = pe.Node( - # niu.Merge(1), - # name="merge_cov_inputs", - # mem_gb=DEFAULT_MEMORY_MIN_GB, - # ) + cov_volume = pe.Node( - fsl.maths.BinaryMaths(op_string='-div %s '), + fsl.maths.BinaryMaths(operation='div'), name="cov_volume", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - + cov_ribbon = pe.Node( fsl.ApplyMask(), name="cov_ribbon", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - + cov_ribbon_mean = pe.Node( fsl.ImageStats(op_string='-M '), - name="cov_mean", + name="cov_ribbon_mean", mem_gb=DEFAULT_MEMORY_MIN_GB, ) + cov_ribbon_std = pe.Node( fsl.ImageStats(op_string='-S '), - name="cov_std", + name="cov_ribbon_std", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - + cov_ribbon_norm = pe.Node( fsl.maths.BinaryMaths(operation='div'), name="cov_ribbon_norm", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - + smooth_norm = pe.Node( fsl.maths.MathsCommand(args="-bin -s 5 "), name="smooth_norm", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - + merge_smooth_norm = pe.Node( niu.Merge(1), name="merge_smooth_norm", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - + cov_ribbon_norm_smooth = pe.Node( - fsl.maths.MultiImageMaths(args="-s 5 -div %s -dilD "), + fsl.maths.MultiImageMaths(op_string='-s 5 -div %s -dilD '), name="cov_ribbon_norm_smooth", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - - cov_norm_modulate = pe.Node( - 
fsl.maths.MultiImageMaths(op_string='-div %s -div %s '), - name="cov_ribbon_modulate", + + cov_norm = pe.Node( + fsl.maths.BinaryMaths(operation='div'), + name="cov_norm", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - merge_cov_stats = pe.Node( - niu.Merge(2), - name="merge_cov_stats", + cov_norm_modulate = pe.Node( + fsl.maths.BinaryMaths(operation='div'), + name="cov_norm_modulate", mem_gb=DEFAULT_MEMORY_MIN_GB, ) @@ -393,6 +436,7 @@ def select_target(subject_id, space): name="cov_norm_modulate_ribbon", mem_gb=DEFAULT_MEMORY_MIN_GB, ) + def _calc_upper_thr(in_stats): return in_stats[0] + (in_stats[1] * 0.5) @@ -419,159 +463,229 @@ def _calc_lower_thr(in_stats): mem_gb=DEFAULT_MEMORY_MIN_GB, ) - upper_thr_volume = pe.Node( - fsl.maths.Threshold(direction='below'), - name="upper_thr_volume", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - lower_thr_volume = pe.Node( - fsl.maths.Threshold(direction='above'), - name="lower_thr_volume", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - binarize_upper_volume = pe.Node( - fsl.maths.UnaryMaths(operation="bin"), - name="binarize_upper_volume", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - binarize_lower_volume = pe.Node( - fsl.maths.UnaryMaths(operation="bin"), - name="binarize_lower_volume", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - merge_thr_volume = pe.Node( - niu.Merge(1), - name="merge_thr_volume", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - mod_ribbon_mean = pe.Node( fsl.ImageStats(op_string='-M '), name="mod_ribbon_mean", mem_gb=DEFAULT_MEMORY_MIN_GB, ) + mod_ribbon_std = pe.Node( fsl.ImageStats(op_string='-S '), name="mod_ribbon_std", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - + merge_mod_ribbon_stats = pe.Node( niu.Merge(2), name="merge_mod_ribbon_stats", mem_gb=DEFAULT_MEMORY_MIN_GB, ) - + + bin_mean_volume = pe.Node( + fsl.maths.UnaryMaths(operation="bin"), + name="bin_mean_volume", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + merge_goodvoxels_operands = pe.Node( niu.Merge(2), name="merge_goodvoxels_operands", mem_gb=DEFAULT_MEMORY_MIN_GB, ) + goodvoxels_thr = pe.Node( + fsl.maths.Threshold(), + name="goodvoxels_thr", + mem_gb=mem_gb, + ) + goodvoxels_mask = pe.Node( - fsl.maths.MultiImageMaths(op_string='-thr %s, -bin, -sub %s -mul, -1'), + fsl.maths.MultiImageMaths(op_string='-bin -sub %s -mul -1 '), name="goodvoxels_mask", mem_gb=mem_gb, ) - ConvertNIFTItoFS = pe.Node( - fs.MRIConvert(out_type='mgz'), - name="ConvertNIFTItoFS", - mem_gb=mem_gb, + goodvoxels_ribbon_mask = pe.Node( + fsl.ApplyMask(), + name_source=['in_file'], + keep_extension=True, + name_template='%s', + name="goodvoxels_ribbon_mask", + mem_gb=DEFAULT_MEMORY_MIN_GB, ) - apply_goodvoxels_mask = pe.Node( - fs.ApplyMask(), - name="apply_goodvoxels_mask", + apply_goodvoxels_ribbon_mask = pe.Node( + fsl.ApplyMask(), + name_source=['in_file'], + keep_extension=True, + name_template='%s', + name="apply_goodvoxels_ribbon_mask", mem_gb=mem_gb * 3, ) - - goodvoxels_sampler = pe.MapNode( - fs.SampleToSurface( - cortex_mask=True, - interp_method="trilinear", - out_type="gii", - override_reg_subj=True, - sampling_method="average", - sampling_range=(0, 1, 0.2), - sampling_units="frac", + + get_target_wm = pe.MapNode( + FreeSurferSource(), + iterfield=["in_file", "surface"], + name="get_target_wm", + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + get_target_wm.inputs.hemi = ["lh", "rh"] + + make_target_midthick = pe.MapNode( + MakeMidthickness(thickness=True, distance=0.5), + iterfield=["in_file"], + name="make_target_midthick", + run_without_submitting=True, + mem_gb=mem_gb * 3, + ) + + target_midthick_gifti = 
pe.MapNode( + fs.MRIsConvert(out_datatype="gii"), + iterfield=["in_file"], + name="target_midthick_gifti", + run_without_submitting=True, + mem_gb=mem_gb, + ) + + metric_dilate = pe.MapNode( + MetricDilate( + distance=10, + nearest=True, ), - iterfield=["hemi"], - name="goodvoxels_sampler", + iterfield=["in_file", "surface"], + name="metric_dilate", mem_gb=mem_gb * 3, ) - goodvoxels_sampler.inputs.hemi = ["lh", "rh"] + + # make HCP-style ribbon volume in bold space # fmt:off workflow.connect([ - # (inputnode, medial_nans, [("subjects_dir", "subjects_dir")]), - # (sampler, medial_nans, [("binary_file", "in_file")]), - # (medial_nans, sample_medial_wall_to_volume, [("out_file", "source_file")]), - # (inputnode, sample_medial_wall_to_volume, [("source_file", "template_file")]), - # (inputnode, sample_medial_wall_to_volume, [("subjects_dir", "subjects_dir")]), - # (inputnode, sample_medial_wall_to_volume, [("subject_id", "subject_id")]), - # (sample_medial_wall_to_volume, binarize_volume, [("out_file", "in_file")]), - # (binarize_volume, mask_volume, [("out_file", "mask_file")]), - # (inputnode, mask_volume, [("source_file", "in_file")]), - # (mask_volume, ConvertFStoNIFTI, [("out_file", "in_file")]), - # (ConvertFStoNIFTI, stdev_volume, [("out_file", "in_file")]), - # (ConvertFStoNIFTI, mean_volume, [("out_file", "in_file")]), + (inputnode, select_wm, [("anat_giftis", "inlist")]), + (inputnode, select_pial, [("anat_giftis", "inlist")]), + (inputnode, select_midthick, [("anat_giftis", "inlist")]), + (select_wm, create_wm_distvol, [("out", "surface")]), + (inputnode, create_wm_distvol, [("t1w_mask", "ref_space")]), + (select_pial, create_pial_distvol, [("out", "surface")]), + (inputnode, create_pial_distvol, [("t1w_mask", "ref_space")]), + (create_wm_distvol, thresh_wm_distvol, [("out_vol", "in_file")]), + (create_pial_distvol, uthresh_pial_distvol, [("out_vol", "in_file")]), + (thresh_wm_distvol, bin_wm_distvol, [("out_file", "in_file")]), + (uthresh_pial_distvol, bin_pial_distvol, [("out_file", "in_file")]), + (bin_wm_distvol, split_wm_distvol, [("out_file", "inlist")]), + (split_wm_distvol, merge_wm_distvol_no_flatten, [("out1", "in1")]), + (split_wm_distvol, merge_wm_distvol_no_flatten, [("out2", "in2")]), + (bin_pial_distvol, make_ribbon_vol, [("out_file", "in_file")]), + (merge_wm_distvol_no_flatten, make_ribbon_vol, [("out", "operand_files")]), + (make_ribbon_vol, bin_ribbon_vol, [("out_file", "in_file")]), + (bin_ribbon_vol, split_squeeze_ribbon_vol, [("out_file", "inlist")]), + (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out1", "in_file")]), + (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out2", "operand_file")]), + ]) + + # make HCP-style "goodvoxels" mask in t1w space for filtering outlier voxels + # in bold timeseries, based on modulated normalized covariance + workflow.connect([ + (combine_ribbon_vol_hemis, ribbon_boldsrc_xfm, [("out_file", 'input_image')]), (rename_src, stdev_volume, [("out_file", "in_file")]), (rename_src, mean_volume, [("out_file", "in_file")]), + (mean_volume, ribbon_boldsrc_xfm, [('out_file', 'reference_image')]), (stdev_volume, cov_volume, [("out_file", "in_file")]), (mean_volume, cov_volume, [("out_file", "operand_file")]), (cov_volume, cov_ribbon, [("out_file", "in_file")]), - (combine_ribbon_vol_hemis, cov_ribbon, [("out_file", "mask_file")]), + (ribbon_boldsrc_xfm, cov_ribbon, [("output_image", "mask_file")]), (cov_ribbon, cov_ribbon_mean, [("out_file", "in_file")]), (cov_ribbon, cov_ribbon_std, [("out_file", "in_file")]), 
(cov_ribbon, cov_ribbon_norm, [("out_file", "in_file")]), - (cov_ribbon_mean, cov_ribbon_norm, [("out_file", "operand_file")]), + (cov_ribbon_mean, cov_ribbon_norm, [("out_stat", "operand_value")]), (cov_ribbon_norm, smooth_norm, [("out_file", "in_file")]), (smooth_norm, merge_smooth_norm, [("out_file", "in1")]), (cov_ribbon_norm, cov_ribbon_norm_smooth, [("out_file", "in_file")]), (merge_smooth_norm, cov_ribbon_norm_smooth, [("out", "operand_files")]), - (cov_ribbon_mean, merge_cov_stats, [("out_file", "in1")]), - (cov_ribbon_norm_smooth, merge_cov_stats, [("out_file", "in2")]), - (merge_cov_stats, cov_norm_modulate[("out, operand_files")]), - (cov_volume, cov_norm_modulate, [("out_file, in_file")]), - (cov_norm_modulate, cov_norm_modulate_ribbon, [("out_file, in_file")]), - (combine_ribbon_vol_hemis, cov_norm_modulate_ribbon, [("out, mask_file")]), + (cov_ribbon_mean, cov_norm, [("out_stat", "operand_value")]), + (cov_volume, cov_norm, [("out_file", "in_file")]), + (cov_norm, cov_norm_modulate, [("out_file", "in_file")]), + (cov_ribbon_norm_smooth, cov_norm_modulate, [("out_file", "operand_file")]), + (cov_norm_modulate, cov_norm_modulate_ribbon, [("out_file", "in_file")]), + (ribbon_boldsrc_xfm, cov_norm_modulate_ribbon, [("output_image", "mask_file")]), (cov_norm_modulate_ribbon, mod_ribbon_mean, [("out_file", "in_file")]), (cov_norm_modulate_ribbon, mod_ribbon_std, [("out_file", "in_file")]), (mod_ribbon_mean, merge_mod_ribbon_stats, [("out_stat", "in1")]), (mod_ribbon_std, merge_mod_ribbon_stats, [("out_stat", "in2")]), - (merge_mod_ribbon_stats, upper_thr_val, [("out, in_stats")]), - (merge_mod_ribbon_stats, lower_thr_val, [("out, in_stats")]), - (upper_thr_val, merge_goodvoxels_operands, [("out_thresh, in1")]), - (inputnode, merge_goodvoxels_operands, [("t1w_mask", "in2")]), - (cov_norm_modulate, goodvoxels_mask, [("out_file, in_file")]), - (merge_goodvoxels_operands, goodvoxels_mask, [("out_file, operand_files")]), - - - - - # (merge_cov_stats, lower_thr_val, [("out", "in_stats")]), - # (cov_ribbon, upper_thr_volume, [("out_file", "in_file")]), - # (upper_thr_val, upper_thr_volume, [("upper_thresh", "thresh")]), - # (cov_ribbon, lower_thr_volume, [("out_file", "in_file")]), - # (lower_thr_val, lower_thr_volume, [("lower_thresh", "thresh")]), - # (upper_thr_volume, binarize_upper_volume, [("out_file", "in_file")]), - # (lower_thr_volume, binarize_lower_volume, [("out_file", "in_file")]), - # (binarize_upper_volume, merge_thr_volume, [("out_file", "in1")]), - # (merge_thr_volume, filter_for_outliers, [("out_file", "operand_files")]), - # (binarize_lower_volume, filter_for_outliers, [("out_file", "in_file")]), - # (filter_for_outliers, ConvertNIFTItoFS, [("out_file", "in_file")]), - (goodvoxels_mask, ConvertNIFTItoFS, [("out_file", "in_file")]), - (ConvertNIFTItoFS, apply_goodvoxels_mask, [("out_file", "mask_file")]), - (rename_src, apply_goodvoxels_mask, [("out_file", "in_file")]), - (apply_goodvoxels_mask, sampler, [("out_file", "in_file")]), - (sampler, update_metadata, [("out_file", "in_file")]), + (merge_mod_ribbon_stats, upper_thr_val, [("out", "in_stats")]), + (merge_mod_ribbon_stats, lower_thr_val, [("out", "in_stats")]), + (mean_volume, bin_mean_volume, [("out_file", "in_file")]), + (upper_thr_val, goodvoxels_thr, [("upper_thresh", "thresh")]), + (cov_norm_modulate, goodvoxels_thr, [("out_file", "in_file")]), + (bin_mean_volume, merge_goodvoxels_operands, [("out_file", "in1")]), + (goodvoxels_thr, goodvoxels_mask, [("out_file", "in_file")]), + (merge_goodvoxels_operands, 
goodvoxels_mask, [("out", "operand_files")]), + ]) + + # apply goodvoxels ribbon mask to bold + workflow.connect([ + (goodvoxels_mask, goodvoxels_ribbon_mask, [("out_file", "in_file")]), + (ribbon_boldsrc_xfm, goodvoxels_ribbon_mask, [("output_image", "mask_file")]), + (goodvoxels_ribbon_mask, apply_goodvoxels_ribbon_mask, [("out_file", "mask_file")]), + (rename_src, apply_goodvoxels_ribbon_mask, [("out_file", "in_file")]), + ]) + + # project masked bold to target surfs + workflow.connect([ + (inputnode, get_fsnative, [("subject_id", "subject_id"), + ("subjects_dir", "subjects_dir")]), + (inputnode, targets, [("subject_id", "subject_id")]), + (inputnode, rename_src, [("source_file", "in_file")]), + (inputnode, itk2lta, [("source_file", "src_file"), + ("t1w2fsnative_xfm", "in_file")]), + (get_fsnative, itk2lta, [("T1", "dst_file")]), + (inputnode, sampler, [("subjects_dir", "subjects_dir"), + ("subject_id", "subject_id")]), + (itersource, targets, [("target", "space")]), + (itersource, rename_src, [("target", "subject")]), + (itk2lta, sampler, [("out", "reg_file")]), + (targets, sampler, [("out", "target_subject")]), + (apply_goodvoxels_ribbon_mask, sampler, [("out_file", "source_file")]), (update_metadata, outputnode, [("out_file", "surfaces")]), (itersource, outputnode, [("target", "target")]), ]) + + # fmt:on + if not medial_surface_nan: + # fmt:off + workflow.connect([ + (inputnode, get_target_wm, [('subjects_dir', 'subjects_dir')]), + (targets, get_target_wm, [('out', 'subject_id')]), + (get_target_wm, make_target_midthick, [("white", "in_file")]), + (make_target_midthick, target_midthick_gifti, [("out_file", "in_file")]), + (sampler, metric_dilate, [("out_file", "in_file")]), + (target_midthick_gifti, metric_dilate, [("converted", "surface")]), + (metric_dilate, update_metadata, [("out_file", "in_file")]), + ]) + # fmt:on + return workflow + + from niworkflows.interfaces.freesurfer import MedialNaNs + + # Refine if medial vertices should be NaNs + medial_nans = pe.MapNode( + MedialNaNs(), + iterfield=["in_file"], + name="medial_nans", + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + + # fmt:off + workflow.connect([ + (inputnode, get_target_wm, [('subjects_dir', 'subjects_dir')]), + (targets, get_target_wm, [('out', 'subject_id')]), + (get_target_wm, make_target_midthick, [("white", "in_file")]), + (make_target_midthick, target_midthick_gifti, [("out_file", "in_file")]), + (sampler, metric_dilate, [("out_file", "in_file")]), + (target_midthick_gifti, metric_dilate, [("converted", "surface")]), + (metric_dilate, medial_nans, [("out_file", "in_file")]), + (medial_nans, update_metadata, [("out_file", "in_file")]), + ]) # fmt:on return workflow From 30ae5312f88b512c1412d20f48cd5ba70f6d4d56 Mon Sep 17 00:00:00 2001 From: madisoth <56737848+madisoth@users.noreply.github.com> Date: Wed, 14 Sep 2022 15:26:54 -0700 Subject: [PATCH 07/14] revert work on workbench-based surface sampling wf (to be continued in a separate branch) --- fmriprep/workflows/bold/resampling.py | 192 +++++++++----------------- 1 file changed, 62 insertions(+), 130 deletions(-) diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index f3a3f828e..0c311e6f7 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -36,11 +36,9 @@ from nipype.pipeline import engine as pe from nipype.interfaces import utility as niu, freesurfer as fs, fsl import nipype.interfaces.workbench as wb -from niworkflows.interfaces.freesurfer import MakeMidthickness from 
niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
+from niworkflows.interfaces.freesurfer import MedialNaNs
 from ...interfaces.volume import CreateSignedDistanceVolume
-from ...interfaces.metric import MetricDilate
-
 
 def init_bold_surf_wf(mem_gb,
@@ -53,12 +51,7 @@ def init_bold_surf_wf(mem_gb,
 
     For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart)
     and averaged.
-
-    If --surface-sampler wb is used, Workbench's wb_command -volume-to-surface-mapping
-    with -ribbon-constrained option is used instead of the default FreeSurfer mri_vol2surf.
-    Note that unlike HCP, no additional spatial smoothing is applied to the surface-projected
-    data.
-
+
     If --project-goodvoxels is used, a "goodvoxels" BOLD mask, as described in [@hcppipelines],
     is generated and applied to the functional image before sampling to surface.
     Outputs are in GIFTI format.
@@ -72,9 +65,8 @@ def init_bold_surf_wf(mem_gb,
         wf = init_bold_surf_wf(mem_gb=0.1,
                                surface_spaces=['fsnative', 'fsaverage5'],
                                medial_surface_nan=False,
-                               project_goodvoxels=False,
-                               surface_sampler="fs")
-
+                               project_goodvoxels=False)
+
     Parameters
     ----------
     surface_spaces : :obj:`list`
@@ -87,9 +79,6 @@ def init_bold_surf_wf(mem_gb,
     project_goodvoxels : :obj:`bool`
         Exclude voxels with locally high coefficient of variation, or that lie outside the
         cortical surfaces, from the surface projection.
-    surface_sampler : :obj:`str`
-        'fs' (default) or 'wb' to specify FreeSurfer-based or Workbench-based
-        volume to surface mapping
 
     Inputs
     ------
@@ -124,15 +113,15 @@ def init_bold_surf_wf(mem_gb,
     (FreeSurfer reconstruction nomenclature):
 {out_spaces}.
 """.format(
-        out_spaces=", ".join(["*%s*" % s for s in surface_spaces])
+        out_spaces=", ".join(["*%s*" % s for s in surface_spaces]),
    )
 
     if project_goodvoxels:
         workflow.__desc__ += """\
 Before resampling, a "goodvoxels" mask [@hcppipelines] was applied,
-excluding voxels whose time-series have a locally high coefficient of
-variation, or that lie outside the cortical surfaces,
-from the surface projection.
+excluding voxels whose time-series have a locally high coefficient of
+variation, or that lie outside the cortical surfaces, from the
+surface projection.
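As a rough illustration of what that mask computes (this sketch is not part of the patch,
the names are made up, and the actual workflow builds the equivalent steps from FSL maths
nodes, additionally normalizing and smoothing the coefficient-of-variation map within the
ribbon before thresholding):

    import numpy as np

    def goodvoxels_sketch(bold, ribbon):
        # bold: 4D array (x, y, z, t), the BOLD series resampled to T1w space
        # ribbon: 3D boolean array marking the cortical ribbon
        mean = bold.mean(axis=-1)
        std = bold.std(axis=-1)
        with np.errstate(divide="ignore", invalid="ignore"):
            cov = np.where(mean > 0, std / mean, 0.0)
        # "locally high" is judged against ribbon statistics, using the same
        # cutoff as the _calc_upper_thr helper defined earlier in this file
        upper = cov[ribbon].mean() + 0.5 * cov[ribbon].std()
        return (cov <= upper) & (mean > 0)

With made-up numbers, a ribbon mean of 0.4 and standard deviation of 0.2 give an upper
threshold of 0.4 + 0.5 * 0.2 = 0.5; voxels whose (modulated, normalized) CoV exceeds that
value, or that lie outside the mean-signal mask, are the ones dropped before projection.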
""" inputnode = pe.Node( @@ -141,7 +130,6 @@ def init_bold_surf_wf(mem_gb, "subject_id", "subjects_dir", "t1w2fsnative_xfm", - "itk_bold_to_t1", "anat_giftis", "t1w_mask"] ), @@ -190,13 +178,17 @@ def select_target(subject_id, space): ), name_source=['source_file'], keep_extension=False, - name_template='%s.func.gii', iterfield=["hemi"], name="sampler", mem_gb=mem_gb * 3, ) sampler.inputs.hemi = ["lh", "rh"] + # Refine if medial vertices should be NaNs + medial_nans = pe.MapNode( + MedialNaNs(), iterfield=["in_file"], name="medial_nans", mem_gb=DEFAULT_MEMORY_MIN_GB + ) + update_metadata = pe.MapNode( GiftiSetAnatomicalStructure(), iterfield=["in_file"], @@ -209,50 +201,11 @@ def select_target(subject_id, space): joinsource="itersource", name="outputnode", ) - - if not project_goodvoxels: - # fmt: off - workflow.connect([ - (inputnode, get_fsnative, [('subject_id', 'subject_id'), - ('subjects_dir', 'subjects_dir')]), - (inputnode, targets, [('subject_id', 'subject_id')]), - (inputnode, rename_src, [('source_file', 'in_file')]), - (inputnode, itk2lta, [('source_file', 'src_file'), - ('t1w2fsnative_xfm', 'in_file')]), - (get_fsnative, itk2lta, [('T1', 'dst_file')]), - (inputnode, sampler, [('subjects_dir', 'subjects_dir'), - ('subject_id', 'subject_id')]), - (itersource, targets, [('target', 'space')]), - (itersource, rename_src, [('target', 'subject')]), - (itk2lta, sampler, [('out', 'reg_file')]), - (targets, sampler, [('out', 'target_subject')]), - (rename_src, sampler, [('out_file', 'source_file')]), - (update_metadata, outputnode, [('out_file', 'surfaces')]), - (itersource, outputnode, [('target', 'target')]), - ]) - # fmt: on - - if not medial_surface_nan: - workflow.connect(sampler, "out_file", update_metadata, "in_file") - return workflow - - from niworkflows.interfaces.freesurfer import MedialNaNs - - # Refine if medial vertices should be NaNs - medial_nans = pe.MapNode( - MedialNaNs(), iterfield=["in_file"], name="medial_nans", mem_gb=DEFAULT_MEMORY_MIN_GB - ) - - # fmt: off - workflow.connect([ - (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]), - (sampler, medial_nans, [('out_file', 'in_file')]), - (medial_nans, update_metadata, [('out_file', 'in_file')]), - ]) - # fmt: on - return workflow - # 0, 1, 2, 3, 6, 7 = lh wm, rh wm, lh pial, rh pial, lh mid, rh mid + # 0, 1 = wm; 2, 3 = pial; 6, 7 = mid + # note that order of lh / rh within each surf type is not guaranteed due to use + # of unsorted glob by FreeSurferSource prior, but we can do a sort with + # _sorted_by_basename to ensure consistent ordering select_wm = pe.Node( niu.Select(index=[0, 1]), name="select_wm", @@ -269,7 +222,7 @@ def select_target(subject_id, space): niu.Select(index=[6, 7]), name="select_midthick", mem_gb=DEFAULT_MEMORY_MIN_GB, - ) + ) create_wm_distvol = pe.MapNode( CreateSignedDistanceVolume(), @@ -504,12 +457,10 @@ def _calc_lower_thr(in_stats): name="goodvoxels_mask", mem_gb=mem_gb, ) - + goodvoxels_ribbon_mask = pe.Node( fsl.ApplyMask(), name_source=['in_file'], - keep_extension=True, - name_template='%s', name="goodvoxels_ribbon_mask", mem_gb=DEFAULT_MEMORY_MIN_GB, ) @@ -517,56 +468,53 @@ def _calc_lower_thr(in_stats): apply_goodvoxels_ribbon_mask = pe.Node( fsl.ApplyMask(), name_source=['in_file'], - keep_extension=True, - name_template='%s', name="apply_goodvoxels_ribbon_mask", mem_gb=mem_gb * 3, ) - get_target_wm = pe.MapNode( - FreeSurferSource(), - iterfield=["in_file", "surface"], - name="get_target_wm", - run_without_submitting=True, - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - 
get_target_wm.inputs.hemi = ["lh", "rh"] - - make_target_midthick = pe.MapNode( - MakeMidthickness(thickness=True, distance=0.5), - iterfield=["in_file"], - name="make_target_midthick", - run_without_submitting=True, - mem_gb=mem_gb * 3, - ) + if not project_goodvoxels: + # fmt: off + workflow.connect([ + (inputnode, get_fsnative, [('subject_id', 'subject_id'), + ('subjects_dir', 'subjects_dir')]), + (inputnode, targets, [('subject_id', 'subject_id')]), + (inputnode, rename_src, [('source_file', 'in_file')]), + (inputnode, itk2lta, [('source_file', 'src_file'), + ('t1w2fsnative_xfm', 'in_file')]), + (get_fsnative, itk2lta, [("T1", "dst_file")]), + (inputnode, sampler, [('subjects_dir', 'subjects_dir'), + ('subject_id', 'subject_id')]), + (itersource, targets, [('target', 'space')]), + (itersource, rename_src, [('target', 'subject')]), + (itk2lta, sampler, [('out', 'reg_file')]), + (targets, sampler, [('out', 'target_subject')]), + (rename_src, sampler, [('out_file', 'source_file')]), + (update_metadata, outputnode, [('out_file', 'surfaces')]), + (itersource, outputnode, [('target', 'target')]), + ]) + # fmt: on - target_midthick_gifti = pe.MapNode( - fs.MRIsConvert(out_datatype="gii"), - iterfield=["in_file"], - name="target_midthick_gifti", - run_without_submitting=True, - mem_gb=mem_gb, - ) + if not medial_surface_nan: + workflow.connect(sampler, "out_file", update_metadata, "in_file") + return workflow - metric_dilate = pe.MapNode( - MetricDilate( - distance=10, - nearest=True, - ), - iterfield=["in_file", "surface"], - name="metric_dilate", - mem_gb=mem_gb * 3, - ) + # fmt: off + workflow.connect([ + (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]), + (sampler, medial_nans, [('out_file', 'in_file')]), + (medial_nans, update_metadata, [('out_file', 'in_file')]), + ]) + # fmt: on + return workflow - # make HCP-style ribbon volume in bold space - # fmt:off + # make HCP-style ribbon volume in T1w space workflow.connect([ (inputnode, select_wm, [("anat_giftis", "inlist")]), (inputnode, select_pial, [("anat_giftis", "inlist")]), (inputnode, select_midthick, [("anat_giftis", "inlist")]), - (select_wm, create_wm_distvol, [("out", "surface")]), + (select_wm, create_wm_distvol, [(("out", _sorted_by_basename), "surface")]), (inputnode, create_wm_distvol, [("t1w_mask", "ref_space")]), - (select_pial, create_pial_distvol, [("out", "surface")]), + (select_pial, create_pial_distvol, [(("out", _sorted_by_basename), "surface")]), (inputnode, create_pial_distvol, [("t1w_mask", "ref_space")]), (create_wm_distvol, thresh_wm_distvol, [("out_vol", "in_file")]), (create_pial_distvol, uthresh_pial_distvol, [("out_vol", "in_file")]), @@ -654,36 +602,15 @@ def _calc_lower_thr(in_stats): if not medial_surface_nan: # fmt:off workflow.connect([ - (inputnode, get_target_wm, [('subjects_dir', 'subjects_dir')]), - (targets, get_target_wm, [('out', 'subject_id')]), - (get_target_wm, make_target_midthick, [("white", "in_file")]), - (make_target_midthick, target_midthick_gifti, [("out_file", "in_file")]), - (sampler, metric_dilate, [("out_file", "in_file")]), - (target_midthick_gifti, metric_dilate, [("converted", "surface")]), - (metric_dilate, update_metadata, [("out_file", "in_file")]), + (sampler, update_metadata, [("out_file", "in_file")]), ]) # fmt:on return workflow - from niworkflows.interfaces.freesurfer import MedialNaNs - - # Refine if medial vertices should be NaNs - medial_nans = pe.MapNode( - MedialNaNs(), - iterfield=["in_file"], - name="medial_nans", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - # 
fmt:off workflow.connect([ - (inputnode, get_target_wm, [('subjects_dir', 'subjects_dir')]), - (targets, get_target_wm, [('out', 'subject_id')]), - (get_target_wm, make_target_midthick, [("white", "in_file")]), - (make_target_midthick, target_midthick_gifti, [("out_file", "in_file")]), - (sampler, metric_dilate, [("out_file", "in_file")]), - (target_midthick_gifti, metric_dilate, [("converted", "surface")]), - (metric_dilate, medial_nans, [("out_file", "in_file")]), + (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]), + (sampler, medial_nans, [("out_file", "in_file")]), (medial_nans, update_metadata, [("out_file", "in_file")]), ]) # fmt:on @@ -1419,3 +1346,8 @@ def _itk2lta(in_file, src_file, dst_file): in_file, fmt="fs" if in_file.endswith(".lta") else "itk", reference=src_file ).to_filename(out_file, moving=dst_file, fmt="fs") return str(out_file) + + +def _sorted_by_basename(inlist): + from os.path import basename + return sorted(inlist, key=lambda x: str(basename(x))) From c555283eec78d4e84882057ccb16c638f5b04702 Mon Sep 17 00:00:00 2001 From: madisoth <56737848+madisoth@users.noreply.github.com> Date: Wed, 14 Sep 2022 15:41:11 -0700 Subject: [PATCH 08/14] revert work on workench-based surface sampling wf (to be continued in separate branch) --- docs/workflows.rst | 7 +- fmriprep/cli/parser.py | 9 - fmriprep/config.py | 3 - fmriprep/data/tests/config.toml | 1 - fmriprep/interfaces/metric.py | 305 --------------------------- fmriprep/interfaces/sphereproject.py | 69 ------ fmriprep/workflows/bold/base.py | 1 - 7 files changed, 1 insertion(+), 394 deletions(-) delete mode 100644 fmriprep/interfaces/metric.py delete mode 100644 fmriprep/interfaces/sphereproject.py diff --git a/docs/workflows.rst b/docs/workflows.rst index 5b42bce9e..f64124491 100644 --- a/docs/workflows.rst +++ b/docs/workflows.rst @@ -479,8 +479,7 @@ EPI sampled to FreeSurfer surfaces mem_gb=1, surface_spaces=['fsnative', 'fsaverage5'], medial_surface_nan=False, - project_goodvoxels=False, - surface_sampler="fs") + project_goodvoxels=False) If FreeSurfer processing is enabled, the motion-corrected functional series (after single shot resampling to T1w space) is sampled to the @@ -488,10 +487,6 @@ surface by averaging across the cortical ribbon. Specifically, at each vertex, the segment normal to the white-matter surface, extending to the pial surface, is sampled at 6 intervals and averaged. -With ``--surface-sampler wb``, Workbench wb_command -volume-to-surface-mapping -(with -ribbon-constrained) is used for surface sampling instead of the default -FreeSurfer method described above. - Surfaces are generated for the "subject native" surface, as well as transformed to the ``fsaverage`` template space. diff --git a/fmriprep/cli/parser.py b/fmriprep/cli/parser.py index 9def95db5..a468537f6 100644 --- a/fmriprep/cli/parser.py +++ b/fmriprep/cli/parser.py @@ -372,15 +372,6 @@ def _slice_time_ref(value, parser): "from surface resampling. Only performed for GIFTI files mapped to a freesurfer subject " "(fsaverage or fsnative).", ) - g_conf.add_argument( - "--surface-sampler", - action="store", - choices=("fs", "wb"), - default="fs", - help="select sampling method used for projection of BOLD timeseries to surface. " - "'fs' to use FreeSurfer mri_vol2surf (default). 
" - "'wb' to use Workbench wb_command -volume-to-surface-mapping.", - ) g_conf.add_argument( "--slice-time-ref", required=False, diff --git a/fmriprep/config.py b/fmriprep/config.py index 68e87318c..61da7d0bf 100644 --- a/fmriprep/config.py +++ b/fmriprep/config.py @@ -548,9 +548,6 @@ class workflow(_Config): """Fill medial surface with :abbr:`NaNs (not-a-number)` when sampling.""" project_goodvoxels = False """Exclude voxels with locally high coefficient of variation from sampling.""" - surface_sampler = "fs" - """``fs`` (default) or ``wb`` to select FreeSurfer-based or Workbench-based - method for sampling BOLD to surface.""" regressors_all_comps = None """Return all CompCor components.""" regressors_dvars_th = None diff --git a/fmriprep/data/tests/config.toml b/fmriprep/data/tests/config.toml index 097a974d0..0519e4702 100644 --- a/fmriprep/data/tests/config.toml +++ b/fmriprep/data/tests/config.toml @@ -40,7 +40,6 @@ ignore = [] longitudinal = false medial_surface_nan = false project_goodvoxels = false -surface_sampler = "fs" regressors_all_comps = false regressors_dvars_th = 1.5 regressors_fd_th = 0.5 diff --git a/fmriprep/interfaces/metric.py b/fmriprep/interfaces/metric.py deleted file mode 100644 index 16d699c43..000000000 --- a/fmriprep/interfaces/metric.py +++ /dev/null @@ -1,305 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""This module provides interfaces for workbench surface commands""" -import os - -from nipype.interfaces.base import TraitedSpec, File, traits, CommandLineInputSpec -from nipype.interfaces.workbench.base import WBCommand -from nipype import logging - -iflogger = logging.getLogger("nipype.interface") - - -class MetricDilateInputSpec(CommandLineInputSpec): - - in_file = File( - exists=True, - mandatory=True, - argstr="%s ", - position=0, - desc="the metric to dilate", - ) - - surface = File( - exists=True, - mandatory=True, - argstr="%s ", - position=1, - desc="the surface to compute on", - ) - - distance = traits.Float( - mandatory=True, - argstr="%f ", - position=2, - desc="distance in mm to dilate", - ) - - out_file = File( - name_source=["in_file"], - name_template="%s.func.gii", - keep_extension=False, - argstr="%s ", - position=3, - desc="output - the output metric", - ) - - bad_vertex_roi = File( - argstr="-bad-vertex-roi %s ", - position=4, - desc="metric file, positive values denote vertices to have their values replaced", - ) - - data_roi = File( - argstr="-data-roi %s ", - position=5, - desc="metric file, positive values denote vertices that have data", - ) - - column = traits.Int( - position=6, - argstr="-column %d ", - desc="the column number", - ) - - nearest = traits.Bool( - position=7, - argstr="-nearest ", - desc="use the nearest good value instead of a weighted average", - ) - - linear = traits.Bool( - position=8, - argstr="-linear ", - desc="fill in values with linear interpolation along strongest gradient", - ) - - exponent = traits.Float( - argstr="-exponent %f ", - position=9, - default=6.0, - desc="exponent n to use in (area / (distance ^ n)) as the " - "weighting function (default 6)", - ) - - corrected_areas = File( - argstr="-corrected-areas %s ", - position=10, - desc="vertex areas to use instead of computing them from the surface", - ) - - legacy_cutoff = traits.Bool( - position=11, - argstr="-legacy-cutoff ", - desc="use the v1.3.2 method of choosing how many vertices to " - "use when calulating the dilated value with weighted 
method", - ) - - -class MetricDilateOutputSpec(TraitedSpec): - out_file = File(exists=True, desc="output file") - - -class MetricDilate(WBCommand): - """ - For all data values designated as bad, if they neighbor a good value or - are within the specified distance of a good value in the same kind of - model, replace the value with a distance weighted average of nearby good - values, otherwise set the value to zero. If -nearest is specified, it - will use the value from the closest good value within range instead of a - weighted average. When the input file contains label data, nearest - dilation is used on the surface, and weighted popularity is used in the - volume. - - The -corrected-areas options are intended for dilating on group average - surfaces, but it is only an approximate correction for the reduction of - structure in a group average surface. - - If -bad-vertex-roi is specified, all values, including those with - value zero, are good, except for locations with a positive value in the - ROI. If it is not specified, only values equal to zero are bad. - - >>> from nipype.interfaces.workbench import MetricDilate - >>> metdil = MetricDilate() - >>> metdil.inputs.in_file = 'sub-01_task-rest_bold_space-fsaverage5.L.func.gii' - >>> metdil.inputs.surface = 'sub-01.L.midthickness.surf.gii' - >>> metdil.inputs.distance = 10 - >>> metdil.inputs.nearest = True - >>> metdil.cmdline - 'wb_command -metric-dilate \ - sub-01_task-rest_bold_space-fsaverage5.L.func.gii \ - sub-01.L.midthickness.surf.gii \ - 10 \ - sub-01_task-rest_bold_space-fsaverage5.L.func.gii \ - -nearest' - """ - input_spec = MetricDilateInputSpec - output_spec = MetricDilateOutputSpec - _cmd = "wb_command -metric-dilate " - - -class MetricResampleInputSpec(CommandLineInputSpec): - in_file = File( - exists=True, - mandatory=True, - argstr="%s", - position=0, - desc="The metric file to resample", - ) - current_sphere = File( - exists=True, - mandatory=True, - argstr="%s", - position=1, - desc="A sphere surface with the mesh that the metric is currently on", - ) - new_sphere = File( - exists=True, - mandatory=True, - argstr="%s", - position=2, - desc="A sphere surface that is in register with and" - " has the desired output mesh", - ) - method = traits.Enum( - "ADAP_BARY_AREA", - "BARYCENTRIC", - argstr="%s", - mandatory=True, - position=3, - desc="The method name - ADAP_BARY_AREA method is recommended for" - " ordinary metric data, because it should use all data while" - " downsampling, unlike BARYCENTRIC. 
If ADAP_BARY_AREA is used," - " exactly one of area_surfs or area_metrics must be specified", - ) - out_file = File( - name_source=["new_sphere"], - name_template="%s.out", - keep_extension=True, - argstr="%s", - position=4, - desc="The output metric", - ) - area_surfs = traits.Bool( - position=5, - argstr="-area-surfs", - xor=["area_metrics"], - desc="Specify surfaces to do vertex area correction based on", - ) - area_metrics = traits.Bool( - position=5, - argstr="-area-metrics", - xor=["area_surfs"], - desc="Specify vertex area metrics to do area correction based on", - ) - current_area = File( - exists=True, - position=6, - argstr="%s", - desc="A relevant anatomical surface with mesh OR" - " a metric file with vertex areas for mesh", - ) - new_area = File( - exists=True, - position=7, - argstr="%s", - desc="A relevant anatomical surface with mesh OR" - " a metric file with vertex areas for mesh", - ) - roi_metric = File( - exists=True, - position=8, - argstr="-current-roi %s", - desc="Input roi on the current mesh used to exclude non-data vertices", - ) - valid_roi_out = traits.Bool( - position=9, - argstr="-valid-roi-out", - desc="Output the ROI of vertices that got data from valid source vertices", - ) - largest = traits.Bool( - position=10, - argstr="-largest", - desc="Use only the value of the vertex with the largest weight", - ) - - -class MetricResampleOutputSpec(TraitedSpec): - out_file = File(exists=True, desc="the output metric") - roi_file = File(desc="ROI of vertices that got data from valid source vertices") - - -class MetricResample(WBCommand): - """ - Resample a metric file to a different mesh - - Resamples a metric file, given two spherical surfaces that are in - register. If ``ADAP_BARY_AREA`` is used, exactly one of -area-surfs or - ``-area-metrics`` must be specified. - - The ``ADAP_BARY_AREA`` method is recommended for ordinary metric data, - because it should use all data while downsampling, unlike ``BARYCENTRIC``. - The recommended areas option for most data is individual midthicknesses - for individual data, and averaged vertex area metrics from individual - midthicknesses for group average data. - - The ``-current-roi`` option only masks the input, the output may be slightly - dilated in comparison, consider using ``-metric-mask`` on the output when - using ``-current-roi``. - - The ``-largest option`` results in nearest vertex behavior when used with - ``BARYCENTRIC``. When resampling a binary metric, consider thresholding at - 0.5 after resampling rather than using ``-largest``. 
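As a small, hypothetical illustration of that last recommendation (not part of this patch;
the array values are made up), thresholding a binary ROI after resampling rather than
passing ``-largest`` looks like:

    import numpy as np

    resampled_roi = np.array([0.0, 0.2, 0.6, 0.97])      # binary ROI after resampling
    binarized = (resampled_roi > 0.5).astype(np.uint8)   # -> array([0, 0, 1, 1], dtype=uint8)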
- - >>> from nipype.interfaces.workbench import MetricResample - >>> metres = MetricResample() - >>> metres.inputs.in_file = 'sub-01_task-rest_bold_space-fsaverage5.L.func.gii' - >>> metres.inputs.method = 'ADAP_BARY_AREA' - >>> metres.inputs.current_sphere = 'fsaverage5_std_sphere.L.10k_fsavg_L.surf.gii' - >>> metres.inputs.new_sphere = 'fs_LR-deformed_to-fsaverage.L.sphere.32k_fs_LR.surf.gii' - >>> metres.inputs.area_metrics = True - >>> metres.inputs.current_area = 'fsaverage5.L.midthickness_va_avg.10k_fsavg_L.shape.gii' - >>> metres.inputs.new_area = 'fs_LR.L.midthickness_va_avg.32k_fs_LR.shape.gii' - >>> metres.cmdline - 'wb_command -metric-resample sub-01_task-rest_bold_space-fsaverage5.L.func.gii \ - fsaverage5_std_sphere.L.10k_fsavg_L.surf.gii \ - fs_LR-deformed_to-fsaverage.L.sphere.32k_fs_LR.surf.gii \ - ADAP_BARY_AREA fs_LR-deformed_to-fsaverage.L.sphere.32k_fs_LR.surf.out \ - -area-metrics fsaverage5.L.midthickness_va_avg.10k_fsavg_L.shape.gii \ - fs_LR.L.midthickness_va_avg.32k_fs_LR.shape.gii' - """ - - input_spec = MetricResampleInputSpec - output_spec = MetricResampleOutputSpec - _cmd = "wb_command -metric-resample" - - def _format_arg(self, opt, spec, val): - if opt in ["current_area", "new_area"]: - if not self.inputs.area_surfs and not self.inputs.area_metrics: - raise ValueError( - "{} was set but neither area_surfs or" - " area_metrics were set".format(opt) - ) - if opt == "method": - if ( - val == "ADAP_BARY_AREA" - and not self.inputs.area_surfs - and not self.inputs.area_metrics - ): - raise ValueError( - "Exactly one of area_surfs or area_metrics" " must be specified" - ) - if opt == "valid_roi_out" and val: - # generate a filename and add it to argstr - roi_out = self._gen_filename(self.inputs.in_file, suffix="_roi") - iflogger.info("Setting roi output file as", roi_out) - spec.argstr += " " + roi_out - return super(MetricResample, self)._format_arg(opt, spec, val) - - def _list_outputs(self): - outputs = super(MetricResample, self)._list_outputs() - if self.inputs.valid_roi_out: - roi_file = self._gen_filename(self.inputs.in_file, suffix="_roi") - outputs["roi_file"] = os.path.abspath(roi_file) - return outputs diff --git a/fmriprep/interfaces/sphereproject.py b/fmriprep/interfaces/sphereproject.py deleted file mode 100644 index b2348238c..000000000 --- a/fmriprep/interfaces/sphereproject.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""This module provides interfaces for workbench sphere projection commands""" - -# from distutils.cmd import Command -from signal import valid_signals -from nipype.interfaces.workbench.base import WBCommand -from nipype.interfaces.base import ( - TraitedSpec, - File, - traits, - CommandLineInputSpec, - SimpleInterface, -) -from nipype import logging - - -class SurfaceSphereProjectUnprojectInputSpec(CommandLineInputSpec): - - in_file = File( - exists=True, - mandatory=True, - argstr="%s ", - position=0, - desc="a sphere with the desired output mesh", - ) - - sphere_project_to = File( - exists=True, - mandatory=True, - argstr="%s ", - position=1, - desc="a sphere that aligns with sphere-in", - ) - - sphere_unproject_from = File( - exists=True, - mandatory=True, - argstr="%s ", - position=2, - desc="deformed to the desired output space", - ) - - out_file = File( - name_source="in_file", - name_template="%s_deformed.surf.gii ", - keep_extension=False, - argstr="%s", - position=3, - desc="The sphere output 
file", - ) - - -class SurfaceSphereProjectUnprojectOutputSpec(TraitedSpec): - out_file = File(exists=True, desc="output file") - - -class SurfaceSphereProjectUnproject(WBCommand): - # COPY REGISTRATION DEFORMATIONS TO DIFFERENT SPHERE - # wb_command -surface-sphere-project-unproject - # - a sphere with the desired output mesh - # - a sphere that aligns with sphere-in - # - deformed to the desired output space - # - output - the output sphere - - input_spec = SurfaceSphereProjectUnprojectInputSpec - output_spec = SurfaceSphereProjectUnprojectOutputSpec - _cmd = "wb_command -surface-sphere-project-unproject " \ No newline at end of file diff --git a/fmriprep/workflows/bold/base.py b/fmriprep/workflows/bold/base.py index 60900cb91..21abc9dd8 100644 --- a/fmriprep/workflows/bold/base.py +++ b/fmriprep/workflows/bold/base.py @@ -948,7 +948,6 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False): surface_spaces=freesurfer_spaces, medial_surface_nan=config.workflow.medial_surface_nan, project_goodvoxels=config.workflow.project_goodvoxels, - surface_sampler=config.workflow.surface_sampler, name="bold_surf_wf", ) # fmt:off From ba7f699569d814e93eb63ee852abdca25551ffb7 Mon Sep 17 00:00:00 2001 From: madisoth <56737848+madisoth@users.noreply.github.com> Date: Thu, 15 Sep 2022 10:43:50 -0700 Subject: [PATCH 09/14] volume.py docstring import fix --- fmriprep/interfaces/volume.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fmriprep/interfaces/volume.py b/fmriprep/interfaces/volume.py index 6d8e19386..c780097dd 100644 --- a/fmriprep/interfaces/volume.py +++ b/fmriprep/interfaces/volume.py @@ -133,7 +133,7 @@ class CreateSignedDistanceVolume(WBCommand): (negative) crossings of a vertical ray from the point, then counts as inside if the total is odd, negative, or nonzero, respectively. - >>> from nipype.interfaces.workbench import CreateSignedDistanceVolume + >>> from fmriprep.interfaces.volume import CreateSignedDistanceVolume >>> distvol = CreateSignedDistanceVolume() >>> distvol.inputs.surface = 'sub-01.L.pial.native.surf.gii' >>> distvol.inputs.refspace = 'sub-01_T1w.nii.gz' From a6804471564b73268b4437451bd3e91369494252 Mon Sep 17 00:00:00 2001 From: madisoth <56737848+madisoth@users.noreply.github.com> Date: Thu, 15 Sep 2022 13:47:26 -0700 Subject: [PATCH 10/14] remove CreateSignedDistanceVolume doctest for now (needs anatomical surfaces for test sub) --- fmriprep/interfaces/volume.py | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/fmriprep/interfaces/volume.py b/fmriprep/interfaces/volume.py index c780097dd..b96f1e45d 100644 --- a/fmriprep/interfaces/volume.py +++ b/fmriprep/interfaces/volume.py @@ -133,27 +133,6 @@ class CreateSignedDistanceVolume(WBCommand): (negative) crossings of a vertical ray from the point, then counts as inside if the total is odd, negative, or nonzero, respectively. 
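For context, the resampling workflow thresholds these signed distance volumes to build the
HCP-style cortical ribbon, relying on the convention that distances are positive outside the
surface and negative inside it. A minimal sketch of that downstream step (not part of this
patch; names are illustrative and the inputs are assumed to be NumPy arrays on a common grid):

    def ribbon_sketch(wm_dist, pial_dist):
        # wm_dist, pial_dist: signed distance volumes for the white and pial surfaces
        outside_wm = wm_dist > 0     # cf. thresh_wm_distvol: "-thr 0 -bin -mul 255"
        inside_pial = pial_dist < 0  # cf. uthresh_pial_distvol: "-uthr 0 -abs -bin -mul 255"
        return outside_wm & inside_pial   # voxels between the white and pial surfaces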
- >>> from fmriprep.interfaces.volume import CreateSignedDistanceVolume - >>> distvol = CreateSignedDistanceVolume() - >>> distvol.inputs.surface = 'sub-01.L.pial.native.surf.gii' - >>> distvol.inputs.refspace = 'sub-01_T1w.nii.gz' - >>> distvol.inputs.out_vol = 'sub-01.L.pial.native.surf.distvol.nii.gz' - >>> distvol.inputs.roi_out = 'sub-01.L.pial.native.surf.distvolroi.nii.gz' - >>> distvol.inputs.fill_value = 0 - >>> distvol.inputs.exact_limit = 5 - >>> distvol.inputs.approx_limit = 20 - >>> distvol.inputs.approx_neighborhood = 2 - >>> distvol.inputs.winding_method = 'EVEN_ODD' - >>> distvol.cmdline - 'wb_command -create-signed-distance-volume sub-01.L.pial.native.surf.gii \ - sub-01_T1w.nii.gz \ - sub-01.L.pial.native.surf.distvol.nii.gz \ - -roi-out sub-01.L.pial.native.surf.distvolroi.nii.gz \ - -fill-value 0 \ - -exact-limit 5 \ - -approx-limit 20 \ - -approx-neighborhood 2 \ - -winding EVEN_ODD' """ input_spec = CreateSignedDistanceVolumeInputSpec From 17fba921404f802ee11d981cddf105893f146e00 Mon Sep 17 00:00:00 2001 From: madisoth <56737848+madisoth@users.noreply.github.com> Date: Thu, 15 Sep 2022 14:32:37 -0700 Subject: [PATCH 11/14] remove unused itk_bold_to_t1 from bold_surf_wf inputnode --- fmriprep/workflows/bold/base.py | 2 -- fmriprep/workflows/bold/resampling.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/fmriprep/workflows/bold/base.py b/fmriprep/workflows/bold/base.py index 21abc9dd8..1c7762a8f 100644 --- a/fmriprep/workflows/bold/base.py +++ b/fmriprep/workflows/bold/base.py @@ -959,8 +959,6 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False): ("anat_giftis", "inputnode.anat_giftis"), ("t1w_mask", "inputnode.t1w_mask"), ]), - (bold_reg_wf, bold_surf_wf, [("outputnode.itk_bold_to_t1", - "inputnode.itk_bold_to_t1")]), (bold_t1_trans_wf, bold_surf_wf, [("outputnode.bold_t1", "inputnode.source_file")]), (bold_surf_wf, outputnode, [("outputnode.surfaces", "surfaces")]), (bold_surf_wf, func_derivatives_wf, [("outputnode.target", "inputnode.surf_refs")]), diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index 0c311e6f7..259c67ac8 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -92,8 +92,6 @@ def init_bold_surf_wf(mem_gb, FreeSurfer subject ID t1w2fsnative_xfm LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space - itk_bold_to_t1 - Affine transform from ``ref_bold_brain`` to T1 space (ITK format) anat_giftis GIFTI anatomical surfaces in T1w space From ca9d5d48a470beb7c59b30edf5caab1604d86031 Mon Sep 17 00:00:00 2001 From: Greg Conan Date: Tue, 6 Dec 2022 16:54:24 -0600 Subject: [PATCH 12/14] Untested 1st draft integrating madisoth/nibabies:enh/project-goodvoxels --- fmriprep/interfaces/volume.py | 83 +++--- fmriprep/workflows/bold/resampling.py | 409 +++++++++++++++----------- 2 files changed, 277 insertions(+), 215 deletions(-) diff --git a/fmriprep/interfaces/volume.py b/fmriprep/interfaces/volume.py index b96f1e45d..4c6187b33 100644 --- a/fmriprep/interfaces/volume.py +++ b/fmriprep/interfaces/volume.py @@ -12,82 +12,75 @@ class CreateSignedDistanceVolumeInputSpec(CommandLineInputSpec): - surface = File( + surf_file = File( exists=True, mandatory=True, - argstr="%s ", + argstr="%s", position=0, - desc="the input surface", + desc="Input surface GIFTI file (.surf.gii)", ) - ref_space = File( + ref_file = File( exists=True, mandatory=True, - argstr="%s ", + argstr="%s", position=1, - desc="a volume in the desired output space (dims, spacing, 
origin)", + desc="NIfTI volume in the desired output space (dims, spacing, origin)", ) - out_vol = File( + out_file = File( name_source=["surface"], - name_template="%s.distvol.nii.gz", - argstr="%s ", + name_template="%s_distvol.nii.gz", + argstr="%s", position=2, - desc="output - the output volume", + desc="Name of output volume containing signed distances", ) - roi_out = File( + out_mask = File( name_source=["surface"], - name_template="%s.distvolroi.nii.gz", - argstr="-roi-out %s ", - position=3, - desc="output roi volume of where the output has a computed value", + name_template="%s_distmask.nii.gz", + argstr="-roi-out %s", + desc="Name of file to store a mask where the ``out_file`` has a computed value", ) fill_value = traits.Float( mandatory=False, - argstr="-fill-value %f ", - position=4, - desc="specify a value to put in all voxels that don't get assigned a distance, default 0", + default=0., + usedefault=True, + argstr="-fill-value %f", + desc="value to put in all voxels that don't get assigned a distance", ) exact_limit = traits.Float( - mandatory=False, - argstr="-exact-limit %f ", - position=5, - desc="specify distance for exact output in mm, default 5", + default=5., + usedefault=True, + argstr="-exact-limit %f", + desc="distance for exact output in mm", ) approx_limit = traits.Float( - mandatory=False, - argstr="-approx-limit %f ", - position=6, - desc="specify distance for approximate output in mm, default 20", + default=20., + usedefault=True, + argstr="-approx-limit %f", + desc="distance for approximate output in mm", ) approx_neighborhood = traits.Int( - mandatory=False, - argstr="-approx-neighborhood %d ", - position=7, - desc="size of neighborhood cube measured from center to face, default 2 = 5x5x5", + default=2, + usedefault=True, + argstr="-approx-neighborhood %d", + desc="size of neighborhood cube measured from center to face in voxels, default 2 = 5x5x5", ) winding_method = traits.Enum( "EVEN_ODD", "NEGATIVE", "NONZERO", "NORMALS", - argstr="-winding %s ", + argstr="-winding %s", usedefault=True, - position=8, - desc="winding method for point inside surface test, choices: " - "EVEN_ODD (default) " - "NEGATIVE " - "NONZERO " - "NORMALS " + desc="winding method for point inside surface test" ) - class CreateSignedDistanceVolumeOutputSpec(TraitedSpec): - out_vol = File(exists=True, desc="output - the output volume") - roi_out = File(desc="output roi volume of where the output has a computed value") + out_file = File(desc="Name of output volume containing signed distances") + out_mask = File(desc="Name of file to store a mask where the ``out_file`` has a computed value") class CreateSignedDistanceVolume(WBCommand): - """ -CREATE SIGNED DISTANCE VOLUME FROM SURFACE + """CREATE SIGNED DISTANCE VOLUME FROM SURFACE wb_command -create-signed-distance-volume - the input surface - a volume in the desired output space (dims, spacing, origin) @@ -137,8 +130,4 @@ class CreateSignedDistanceVolume(WBCommand): input_spec = CreateSignedDistanceVolumeInputSpec output_spec = CreateSignedDistanceVolumeOutputSpec - _cmd = "wb_command -create-signed-distance-volume" - - def _list_outputs(self): - outputs = super(CreateSignedDistanceVolume, self)._list_outputs() - return outputs + _cmd = "wb_command -create-signed-distance-volume" \ No newline at end of file diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index 259c67ac8..4839cd6a0 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -101,6 +101,7 @@ 
def init_bold_surf_wf(mem_gb, BOLD series, resampled to FreeSurfer surfaces """ + import templateflow as tf from nipype.interfaces.io import FreeSurferSource from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.surf import GiftiSetAnatomicalStructure @@ -109,7 +110,7 @@ def init_bold_surf_wf(mem_gb, workflow.__desc__ = """\ The BOLD time-series were resampled onto the following surfaces (FreeSurfer reconstruction nomenclature): -{out_spaces}. +{out_spaces} """.format( out_spaces=", ".join(["*%s*" % s for s in surface_spaces]), ) @@ -124,12 +125,14 @@ def init_bold_surf_wf(mem_gb, inputnode = pe.Node( niu.IdentityInterface( - fields=["source_file", - "subject_id", - "subjects_dir", - "t1w2fsnative_xfm", - "anat_giftis", - "t1w_mask"] + fields=[ + "source_file", + "subject_id", + "subjects_dir", + "t1w2fsnative_xfm", + "anat_giftis", + "t1w_mask", + ] ), name="inputnode", ) @@ -137,9 +140,7 @@ def init_bold_surf_wf(mem_gb, itersource = pe.Node(niu.IdentityInterface(fields=["target"]), name="itersource") itersource.iterables = [("target", surface_spaces)] - get_fsnative = pe.Node( - FreeSurferSource(), name="get_fsnative", run_without_submitting=True - ) + get_fsnative = pe.Node(FreeSurferSource(), name="get_fsnative", run_without_submitting=True) def select_target(subject_id, space): """Get the target subject ID, given a source subject ID and a target space.""" @@ -160,9 +161,7 @@ def select_target(subject_id, space): mem_gb=DEFAULT_MEMORY_MIN_GB, ) - itk2lta = pe.Node( - niu.Function(function=_itk2lta), name="itk2lta", run_without_submitting=True - ) + itk2lta = pe.Node(niu.Function(function=_itk2lta), name="itk2lta", run_without_submitting=True) sampler = pe.MapNode( fs.SampleToSurface( @@ -187,51 +186,101 @@ def select_target(subject_id, space): MedialNaNs(), iterfield=["in_file"], name="medial_nans", mem_gb=DEFAULT_MEMORY_MIN_GB ) + # Rename the source file to the output space to simplify naming later + prepend_hemi = pe.MapNode( + niu.Rename(format_string="%(hemi)s.target.func.gii"), + iterfield=["in_file", "hemi"], + name="prepend_hemi", + mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, + ) + prepend_hemi.inputs.hemi = ["lh", "rh"] + update_metadata = pe.MapNode( GiftiSetAnatomicalStructure(), iterfield=["in_file"], name="update_metadata", mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, ) outputnode = pe.JoinNode( niu.IdentityInterface(fields=["surfaces", "target"]), joinsource="itersource", name="outputnode", + run_without_submitting=True, ) + if project_goodvoxels: + workflow, combine_ribbon_vol_hemis = make_ribbon_file(workflow, mem_gb, inputnode) + workflow, goodvoxels_mask, ribbon_boldsrc_xfm = mask_outliers( + workflow, mem_gb, rename_src, combine_ribbon_vol_hemis + ) + workflow, apply_goodvoxels_ribbon_mask = project_vol_to_surf( + workflow, mem_gb, rename_src, goodvoxels_mask, ribbon_boldsrc_xfm + ) + + # If not applying goodvoxels, then get sampler.source_file from rename_src + # instead in connect_bold_surf_wf + apply_goodvx_or_rename = apply_goodvoxels_ribbon_mask + else: + apply_goodvx_or_rename = rename_src + + # fmt: off + workflow = connect_bold_surf_wf( + workflow, inputnode, outputnode, get_fsnative, itersource, targets, + rename_src, sampler, itk2lta, update_metadata, apply_goodvx_or_rename + ) + # fmt: on + return apply_med_surf_nans_if( + medial_surface_nan, workflow, inputnode, sampler, update_metadata, medial_nans + ) + + +def make_ribbon_file(workflow, mem_gb, inputnode): + """ + Parameters + 
---------- + :param workflow: _type_, _description_ + :param mem_gb: _type_, _description_ + :param inputnode: _type_, _description_ + :return: _type_, _description_ + """ # 0, 1 = wm; 2, 3 = pial; 6, 7 = mid # note that order of lh / rh within each surf type is not guaranteed due to use - # of unsorted glob by FreeSurferSource prior, but we can do a sort with - # _sorted_by_basename to ensure consistent ordering + # of unsorted glob by FreeSurferSource prior, but we can do a sort + # to ensure consistent ordering select_wm = pe.Node( niu.Select(index=[0, 1]), name="select_wm", mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, ) select_pial = pe.Node( niu.Select(index=[2, 3]), name="select_pial", mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, ) select_midthick = pe.Node( niu.Select(index=[6, 7]), name="select_midthick", mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, ) create_wm_distvol = pe.MapNode( CreateSignedDistanceVolume(), - iterfield=["surface"], + iterfield=["surf_file"], name="create_wm_distvol", mem_gb=mem_gb, ) create_pial_distvol = pe.MapNode( CreateSignedDistanceVolume(), - iterfield=["surface"], + iterfield=["surf_file"], name="create_pial_distvol", mem_gb=mem_gb, ) @@ -268,6 +317,7 @@ def select_target(subject_id, space): niu.Split(splits=[1, 1]), name="split_wm_distvol", mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, ) merge_wm_distvol_no_flatten = pe.Node( @@ -275,10 +325,11 @@ def select_target(subject_id, space): no_flatten=True, name="merge_wm_distvol_no_flatten", mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, ) make_ribbon_vol = pe.MapNode( - fsl.maths.MultiImageMaths(op_string="-mas %s -mul 255 "), + fsl.maths.MultiImageMaths(op_string="-mas %s -mul 255"), iterfield=["in_file", "operand_files"], name="make_ribbon_vol", mem_gb=DEFAULT_MEMORY_MIN_GB, @@ -295,6 +346,7 @@ def select_target(subject_id, space): niu.Split(splits=[1, 1], squeeze=True), name="split_squeeze_ribbon", mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, ) combine_ribbon_vol_hemis = pe.Node( @@ -303,9 +355,38 @@ def select_target(subject_id, space): mem_gb=DEFAULT_MEMORY_MIN_GB, ) + # make HCP-style ribbon volume in T1w space + workflow.connect( + [ + (inputnode, select_wm, [("anat_giftis", "inlist")]), + (inputnode, select_pial, [("anat_giftis", "inlist")]), + (inputnode, select_midthick, [("anat_giftis", "inlist")]), + (select_wm, create_wm_distvol, [(("out", _sorted_by_basename), "surf_file")]), + (inputnode, create_wm_distvol, [("t1w_mask", "ref_file")]), + (select_pial, create_pial_distvol, [(("out", _sorted_by_basename), "surf_file")]), + (inputnode, create_pial_distvol, [("t1w_mask", "ref_file")]), + (create_wm_distvol, thresh_wm_distvol, [("out_file", "in_file")]), + (create_pial_distvol, uthresh_pial_distvol, [("out_file", "in_file")]), + (thresh_wm_distvol, bin_wm_distvol, [("out_file", "in_file")]), + (uthresh_pial_distvol, bin_pial_distvol, [("out_file", "in_file")]), + (bin_wm_distvol, split_wm_distvol, [("out_file", "inlist")]), + (split_wm_distvol, merge_wm_distvol_no_flatten, [("out1", "in1")]), + (split_wm_distvol, merge_wm_distvol_no_flatten, [("out2", "in2")]), + (bin_pial_distvol, make_ribbon_vol, [("out_file", "in_file")]), + (merge_wm_distvol_no_flatten, make_ribbon_vol, [("out", "operand_files")]), + (make_ribbon_vol, bin_ribbon_vol, [("out_file", "in_file")]), + (bin_ribbon_vol, split_squeeze_ribbon_vol, [("out_file", "inlist")]), + (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out1", 
"in_file")]), + (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out2", "operand_file")]), + ] + ) + + return workflow, combine_ribbon_vol_hemis + + +def mask_outliers(workflow, mem_gb, rename_src, combine_ribbon_vol_hemis): ribbon_boldsrc_xfm = pe.Node( - ApplyTransforms(interpolation='MultiLabel', - transforms='identity'), + ApplyTransforms(interpolation='MultiLabel', transforms='identity'), name="ribbon_boldsrc_xfm", mem_gb=mem_gb, ) @@ -335,13 +416,13 @@ def select_target(subject_id, space): ) cov_ribbon_mean = pe.Node( - fsl.ImageStats(op_string='-M '), + fsl.ImageStats(op_string='-M'), name="cov_ribbon_mean", mem_gb=DEFAULT_MEMORY_MIN_GB, ) cov_ribbon_std = pe.Node( - fsl.ImageStats(op_string='-S '), + fsl.ImageStats(op_string='-S'), name="cov_ribbon_std", mem_gb=DEFAULT_MEMORY_MIN_GB, ) @@ -353,7 +434,7 @@ def select_target(subject_id, space): ) smooth_norm = pe.Node( - fsl.maths.MathsCommand(args="-bin -s 5 "), + fsl.maths.MathsCommand(args="-bin -s 5"), name="smooth_norm", mem_gb=DEFAULT_MEMORY_MIN_GB, ) @@ -362,10 +443,11 @@ def select_target(subject_id, space): niu.Merge(1), name="merge_smooth_norm", mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, ) cov_ribbon_norm_smooth = pe.Node( - fsl.maths.MultiImageMaths(op_string='-s 5 -div %s -dilD '), + fsl.maths.MultiImageMaths(op_string='-s 5 -div %s -dilD'), name="cov_ribbon_norm_smooth", mem_gb=DEFAULT_MEMORY_MIN_GB, ) @@ -393,9 +475,7 @@ def _calc_upper_thr(in_stats): upper_thr_val = pe.Node( Function( - input_names=["in_stats"], - output_names=["upper_thresh"], - function=_calc_upper_thr + input_names=["in_stats"], output_names=["upper_thresh"], function=_calc_upper_thr ), name="upper_thr_val", mem_gb=DEFAULT_MEMORY_MIN_GB, @@ -406,22 +486,20 @@ def _calc_lower_thr(in_stats): lower_thr_val = pe.Node( Function( - input_names=["in_stats"], - output_names=["lower_thresh"], - function=_calc_lower_thr + input_names=["in_stats"], output_names=["lower_thresh"], function=_calc_lower_thr ), name="lower_thr_val", mem_gb=DEFAULT_MEMORY_MIN_GB, ) mod_ribbon_mean = pe.Node( - fsl.ImageStats(op_string='-M '), + fsl.ImageStats(op_string='-M'), name="mod_ribbon_mean", mem_gb=DEFAULT_MEMORY_MIN_GB, ) mod_ribbon_std = pe.Node( - fsl.ImageStats(op_string='-S '), + fsl.ImageStats(op_string='-S'), name="mod_ribbon_std", mem_gb=DEFAULT_MEMORY_MIN_GB, ) @@ -430,6 +508,7 @@ def _calc_lower_thr(in_stats): niu.Merge(2), name="merge_mod_ribbon_stats", mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, ) bin_mean_volume = pe.Node( @@ -442,6 +521,7 @@ def _calc_lower_thr(in_stats): niu.Merge(2), name="merge_goodvoxels_operands", mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, ) goodvoxels_thr = pe.Node( @@ -451,14 +531,72 @@ def _calc_lower_thr(in_stats): ) goodvoxels_mask = pe.Node( - fsl.maths.MultiImageMaths(op_string='-bin -sub %s -mul -1 '), + fsl.maths.MultiImageMaths(op_string='-bin -sub %s -mul -1'), name="goodvoxels_mask", mem_gb=mem_gb, ) - + + # make HCP-style "goodvoxels" mask in t1w space for filtering outlier voxels + # in bold timeseries, based on modulated normalized covariance + workflow.connect( + [ + (combine_ribbon_vol_hemis, ribbon_boldsrc_xfm, [("out_file", "input_image")]), + (rename_src, stdev_volume, [("out_file", "in_file")]), + (rename_src, mean_volume, [("out_file", "in_file")]), + (mean_volume, ribbon_boldsrc_xfm, [("out_file", "reference_image")]), + (stdev_volume, cov_volume, [("out_file", "in_file")]), + (mean_volume, cov_volume, [("out_file", "operand_file")]), + (cov_volume, 
cov_ribbon, [("out_file", "in_file")]), + (ribbon_boldsrc_xfm, cov_ribbon, [("output_image", "mask_file")]), + (cov_ribbon, cov_ribbon_mean, [("out_file", "in_file")]), + (cov_ribbon, cov_ribbon_std, [("out_file", "in_file")]), + (cov_ribbon, cov_ribbon_norm, [("out_file", "in_file")]), + (cov_ribbon_mean, cov_ribbon_norm, [("out_stat", "operand_value")]), + (cov_ribbon_norm, smooth_norm, [("out_file", "in_file")]), + (smooth_norm, merge_smooth_norm, [("out_file", "in1")]), + (cov_ribbon_norm, cov_ribbon_norm_smooth, [("out_file", "in_file")]), + (merge_smooth_norm, cov_ribbon_norm_smooth, [("out", "operand_files")]), + (cov_ribbon_mean, cov_norm, [("out_stat", "operand_value")]), + (cov_volume, cov_norm, [("out_file", "in_file")]), + (cov_norm, cov_norm_modulate, [("out_file", "in_file")]), + (cov_ribbon_norm_smooth, cov_norm_modulate, [("out_file", "operand_file")]), + (cov_norm_modulate, cov_norm_modulate_ribbon, [("out_file", "in_file")]), + (ribbon_boldsrc_xfm, cov_norm_modulate_ribbon, [("output_image", "mask_file")]), + (cov_norm_modulate_ribbon, mod_ribbon_mean, [("out_file", "in_file")]), + (cov_norm_modulate_ribbon, mod_ribbon_std, [("out_file", "in_file")]), + (mod_ribbon_mean, merge_mod_ribbon_stats, [("out_stat", "in1")]), + (mod_ribbon_std, merge_mod_ribbon_stats, [("out_stat", "in2")]), + (merge_mod_ribbon_stats, upper_thr_val, [("out", "in_stats")]), + (merge_mod_ribbon_stats, lower_thr_val, [("out", "in_stats")]), + (mean_volume, bin_mean_volume, [("out_file", "in_file")]), + (upper_thr_val, goodvoxels_thr, [("upper_thresh", "thresh")]), + (cov_norm_modulate, goodvoxels_thr, [("out_file", "in_file")]), + (bin_mean_volume, merge_goodvoxels_operands, [("out_file", "in1")]), + (goodvoxels_thr, goodvoxels_mask, [("out_file", "in_file")]), + (merge_goodvoxels_operands, goodvoxels_mask, [("out", "operand_files")]), + ] + ) + + return workflow, goodvoxels_mask, ribbon_boldsrc_xfm + + +def project_vol_to_surf(workflow, mem_gb, rename_src, goodvoxels_mask, ribbon_boldsrc_xfm): + """ + Parameters + ---------- + workflow : :obj: + mem_gb : :obj:`float` + Size of BOLD file in GB + rename_src: _type_, _description_ + goodvoxels_mask: _type_, _description_ + ribbon_boldsrc_xfm: _type_, _description_ + + :return: _type_, _description_ + """ goodvoxels_ribbon_mask = pe.Node( fsl.ApplyMask(), name_source=['in_file'], + keep_extension=True, name="goodvoxels_ribbon_mask", mem_gb=DEFAULT_MEMORY_MIN_GB, ) @@ -466,152 +604,87 @@ def _calc_lower_thr(in_stats): apply_goodvoxels_ribbon_mask = pe.Node( fsl.ApplyMask(), name_source=['in_file'], + keep_extension=True, name="apply_goodvoxels_ribbon_mask", mem_gb=mem_gb * 3, ) - if not project_goodvoxels: - # fmt: off - workflow.connect([ - (inputnode, get_fsnative, [('subject_id', 'subject_id'), - ('subjects_dir', 'subjects_dir')]), - (inputnode, targets, [('subject_id', 'subject_id')]), - (inputnode, rename_src, [('source_file', 'in_file')]), - (inputnode, itk2lta, [('source_file', 'src_file'), - ('t1w2fsnative_xfm', 'in_file')]), - (get_fsnative, itk2lta, [("T1", "dst_file")]), - (inputnode, sampler, [('subjects_dir', 'subjects_dir'), - ('subject_id', 'subject_id')]), - (itersource, targets, [('target', 'space')]), - (itersource, rename_src, [('target', 'subject')]), - (itk2lta, sampler, [('out', 'reg_file')]), - (targets, sampler, [('out', 'target_subject')]), - (rename_src, sampler, [('out_file', 'source_file')]), - (update_metadata, outputnode, [('out_file', 'surfaces')]), - (itersource, outputnode, [('target', 'target')]), - ]) - # fmt: on + # 
apply goodvoxels ribbon mask to bold + workflow.connect( + [ + (goodvoxels_mask, goodvoxels_ribbon_mask, [("out_file", "in_file")]), + (ribbon_boldsrc_xfm, goodvoxels_ribbon_mask, [("output_image", "mask_file")]), + (goodvoxels_ribbon_mask, apply_goodvoxels_ribbon_mask, [("out_file", "mask_file")]), + (rename_src, apply_goodvoxels_ribbon_mask, [("out_file", "in_file")]), + ] + ) + + return workflow, apply_goodvoxels_ribbon_mask + - if not medial_surface_nan: - workflow.connect(sampler, "out_file", update_metadata, "in_file") - return workflow +def apply_med_surf_nans_if( + medial_surface_nan, workflow, inputnode, sampler, update_metadata, medial_nans +): + """ + Parameters + ---------- + medial_surface_nan : :obj:`bool` + Replace medial wall values with NaNs on functional GIFTI files + workflow : _type_, _description_ + inputnode : _type_, _description_ + sampler : _type_, _description_ + update_metadata : _type_, _description_ + medial_nans : _type_, _description_ + :return: workflow, with medial surface NaNs applied if medial_surface_nan + """ + if medial_surface_nan: # fmt: off workflow.connect([ - (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]), - (sampler, medial_nans, [('out_file', 'in_file')]), - (medial_nans, update_metadata, [('out_file', 'in_file')]), + (inputnode, medial_nans, [("subjects_dir", "subjects_dir")]), + (sampler, medial_nans, [("out_file", "in_file")]), + (medial_nans, update_metadata, [("out_file", "in_file")]), ]) # fmt: on - return workflow - - # make HCP-style ribbon volume in T1w space - workflow.connect([ - (inputnode, select_wm, [("anat_giftis", "inlist")]), - (inputnode, select_pial, [("anat_giftis", "inlist")]), - (inputnode, select_midthick, [("anat_giftis", "inlist")]), - (select_wm, create_wm_distvol, [(("out", _sorted_by_basename), "surface")]), - (inputnode, create_wm_distvol, [("t1w_mask", "ref_space")]), - (select_pial, create_pial_distvol, [(("out", _sorted_by_basename), "surface")]), - (inputnode, create_pial_distvol, [("t1w_mask", "ref_space")]), - (create_wm_distvol, thresh_wm_distvol, [("out_vol", "in_file")]), - (create_pial_distvol, uthresh_pial_distvol, [("out_vol", "in_file")]), - (thresh_wm_distvol, bin_wm_distvol, [("out_file", "in_file")]), - (uthresh_pial_distvol, bin_pial_distvol, [("out_file", "in_file")]), - (bin_wm_distvol, split_wm_distvol, [("out_file", "inlist")]), - (split_wm_distvol, merge_wm_distvol_no_flatten, [("out1", "in1")]), - (split_wm_distvol, merge_wm_distvol_no_flatten, [("out2", "in2")]), - (bin_pial_distvol, make_ribbon_vol, [("out_file", "in_file")]), - (merge_wm_distvol_no_flatten, make_ribbon_vol, [("out", "operand_files")]), - (make_ribbon_vol, bin_ribbon_vol, [("out_file", "in_file")]), - (bin_ribbon_vol, split_squeeze_ribbon_vol, [("out_file", "inlist")]), - (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out1", "in_file")]), - (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out2", "operand_file")]), - ]) - - # make HCP-style "goodvoxels" mask in t1w space for filtering outlier voxels - # in bold timeseries, based on modulated normalized covariance - workflow.connect([ - (combine_ribbon_vol_hemis, ribbon_boldsrc_xfm, [("out_file", 'input_image')]), - (rename_src, stdev_volume, [("out_file", "in_file")]), - (rename_src, mean_volume, [("out_file", "in_file")]), - (mean_volume, ribbon_boldsrc_xfm, [('out_file', 'reference_image')]), - (stdev_volume, cov_volume, [("out_file", "in_file")]), - (mean_volume, cov_volume, [("out_file", "operand_file")]), - (cov_volume, cov_ribbon, 
[("out_file", "in_file")]), - (ribbon_boldsrc_xfm, cov_ribbon, [("output_image", "mask_file")]), - (cov_ribbon, cov_ribbon_mean, [("out_file", "in_file")]), - (cov_ribbon, cov_ribbon_std, [("out_file", "in_file")]), - (cov_ribbon, cov_ribbon_norm, [("out_file", "in_file")]), - (cov_ribbon_mean, cov_ribbon_norm, [("out_stat", "operand_value")]), - (cov_ribbon_norm, smooth_norm, [("out_file", "in_file")]), - (smooth_norm, merge_smooth_norm, [("out_file", "in1")]), - (cov_ribbon_norm, cov_ribbon_norm_smooth, [("out_file", "in_file")]), - (merge_smooth_norm, cov_ribbon_norm_smooth, [("out", "operand_files")]), - (cov_ribbon_mean, cov_norm, [("out_stat", "operand_value")]), - (cov_volume, cov_norm, [("out_file", "in_file")]), - (cov_norm, cov_norm_modulate, [("out_file", "in_file")]), - (cov_ribbon_norm_smooth, cov_norm_modulate, [("out_file", "operand_file")]), - (cov_norm_modulate, cov_norm_modulate_ribbon, [("out_file", "in_file")]), - (ribbon_boldsrc_xfm, cov_norm_modulate_ribbon, [("output_image", "mask_file")]), - (cov_norm_modulate_ribbon, mod_ribbon_mean, [("out_file", "in_file")]), - (cov_norm_modulate_ribbon, mod_ribbon_std, [("out_file", "in_file")]), - (mod_ribbon_mean, merge_mod_ribbon_stats, [("out_stat", "in1")]), - (mod_ribbon_std, merge_mod_ribbon_stats, [("out_stat", "in2")]), - (merge_mod_ribbon_stats, upper_thr_val, [("out", "in_stats")]), - (merge_mod_ribbon_stats, lower_thr_val, [("out", "in_stats")]), - (mean_volume, bin_mean_volume, [("out_file", "in_file")]), - (upper_thr_val, goodvoxels_thr, [("upper_thresh", "thresh")]), - (cov_norm_modulate, goodvoxels_thr, [("out_file", "in_file")]), - (bin_mean_volume, merge_goodvoxels_operands, [("out_file", "in1")]), - (goodvoxels_thr, goodvoxels_mask, [("out_file", "in_file")]), - (merge_goodvoxels_operands, goodvoxels_mask, [("out", "operand_files")]), - ]) - - # apply goodvoxels ribbon mask to bold - workflow.connect([ - (goodvoxels_mask, goodvoxels_ribbon_mask, [("out_file", "in_file")]), - (ribbon_boldsrc_xfm, goodvoxels_ribbon_mask, [("output_image", "mask_file")]), - (goodvoxels_ribbon_mask, apply_goodvoxels_ribbon_mask, [("out_file", "mask_file")]), - (rename_src, apply_goodvoxels_ribbon_mask, [("out_file", "in_file")]), - ]) - - # project masked bold to target surfs - workflow.connect([ - (inputnode, get_fsnative, [("subject_id", "subject_id"), - ("subjects_dir", "subjects_dir")]), - (inputnode, targets, [("subject_id", "subject_id")]), - (inputnode, rename_src, [("source_file", "in_file")]), - (inputnode, itk2lta, [("source_file", "src_file"), - ("t1w2fsnative_xfm", "in_file")]), - (get_fsnative, itk2lta, [("T1", "dst_file")]), - (inputnode, sampler, [("subjects_dir", "subjects_dir"), - ("subject_id", "subject_id")]), - (itersource, targets, [("target", "space")]), - (itersource, rename_src, [("target", "subject")]), - (itk2lta, sampler, [("out", "reg_file")]), - (targets, sampler, [("out", "target_subject")]), - (apply_goodvoxels_ribbon_mask, sampler, [("out_file", "source_file")]), - (update_metadata, outputnode, [("out_file", "surfaces")]), - (itersource, outputnode, [("target", "target")]), - ]) + else: + workflow.connect(sampler, "out_file", update_metadata, "in_file") + return workflow - # fmt:on - if not medial_surface_nan: - # fmt:off - workflow.connect([ - (sampler, update_metadata, [("out_file", "in_file")]), - ]) - # fmt:on - return workflow - # fmt:off - workflow.connect([ - (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]), - (sampler, medial_nans, [("out_file", "in_file")]), - (medial_nans, 
update_metadata, [("out_file", "in_file")]), - ]) - # fmt:on +def connect_bold_surf_wf( + workflow, + inputnode, + outputnode, + get_fsnative, + itersource, + targets, + rename_src, + sampler, + itk2lta, + update_metadata, + apply_goodvoxels_or_rename, +): + workflow.connect( + [ + ( + inputnode, + get_fsnative, + [("subject_id", "subject_id"), ("subjects_dir", "subjects_dir")], + ), + (inputnode, targets, [("subject_id", "subject_id")]), + (inputnode, rename_src, [("source_file", "in_file")]), + (inputnode, itk2lta, [("source_file", "src_file"), ("t1w2fsnative_xfm", "in_file")]), + (get_fsnative, itk2lta, [("brain", "dst_file")]), # InfantFS: Use brain instead of T1 + (inputnode, sampler, [("subjects_dir", "subjects_dir"), ("subject_id", "subject_id")]), + (itersource, targets, [("target", "space")]), + (itersource, rename_src, [("target", "subject")]), + (itk2lta, sampler, [("out", "reg_file")]), + (targets, sampler, [("out", "target_subject")]), + (apply_goodvoxels_or_rename, sampler, [("out_file", "source_file")]), + (update_metadata, outputnode, [("out_file", "surfaces")]), + (itersource, outputnode, [("target", "target")]), + ] + ) return workflow From 8bf03e234fdcf0f79096feb70a627458d9c673d7 Mon Sep 17 00:00:00 2001 From: madisoth Date: Thu, 15 Dec 2022 12:40:45 -0600 Subject: [PATCH 13/14] refactor to handle anat_ribbon_wf moving to sMRIPrep --- fmriprep/interfaces/workbench.py | 1572 ++++++++++++++++++++++++++++++ 1 file changed, 1572 insertions(+) create mode 100644 fmriprep/interfaces/workbench.py diff --git a/fmriprep/interfaces/workbench.py b/fmriprep/interfaces/workbench.py new file mode 100644 index 000000000..b717f1db4 --- /dev/null +++ b/fmriprep/interfaces/workbench.py @@ -0,0 +1,1572 @@ +import os + +from nipype.interfaces.base import CommandLineInputSpec, File, Str, TraitedSpec, traits +from nipype.interfaces.base.traits_extension import ( + InputMultiObject, + OutputMultiObject, + isdefined, +) +from nipype.interfaces.workbench.base import WBCommand + +# patch +from nipype.interfaces.workbench.cifti import CiftiSmooth as _CiftiSmooth +from nipype.interfaces.workbench.cifti import ( + CiftiSmoothInputSpec as _CiftiSmoothInputSpec, +) + +VALID_STRUCTURES = ( + "CORTEX_LEFT", + "CORTEX_RIGHT", + "CEREBELLUM", + "ACCUMBENS_LEFT", + "ACCUMBENS_RIGHT", + "ALL_GREY_MATTER", + "ALL_WHITE_MATTER", + "AMYGDALA_LEFT", + "AMYGDALA_RIGHT", + "BRAIN_STEM", + "CAUDATE_LEFT", + "CAUDATE_RIGHT", + "CEREBELLAR_WHITE_MATTER_LEFT", + "CEREBELLAR_WHITE_MATTER_RIGHT", + "CEREBELLUM_LEFT", + "CEREBELLUM_RIGHT", + "CEREBRAL_WHITE_MATTER_LEFT", + "CEREBRAL_WHITE_MATTER_RIGHT", + "CORTEX", + "DIENCEPHALON_VENTRAL_LEFT", + "DIENCEPHALON_VENTRAL_RIGHT", + "HIPPOCAMPUS_LEFT", + "HIPPOCAMPUS_RIGHT", + "INVALID", + "OTHER", + "OTHER_GREY_MATTER", + "OTHER_WHITE_MATTER", + "PALLIDUM_LEFT", + "PALLIDUM_RIGHT", + "PUTAMEN_LEFT", + "PUTAMEN_RIGHT", + "THALAMUS_LEFT", + "THALAMUS_RIGHT", +) + + +class CiftiCreateDenseFromTemplateInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + mandatory=True, + argstr="%s", + position=0, + desc="File to match brainordinates of", + ) + out_file = File( + name_source=["in_file"], + name_template="template_%s.nii", + keep_extension=True, + argstr="%s", + position=1, + desc="The output CIFTI file", + ) + series = traits.Bool( + argstr="-series", + position=2, + desc="Make a dtseries file instead of a dscalar", + ) + series_step = traits.Float( + requires=["series"], + argstr="%.1f", + position=3, + desc="Increment between series points", + ) + series_start 
= traits.Float( + requires=["series"], + argstr="%.1f", + position=4, + desc="Start value of the series", + ) + series_unit = traits.Enum( + "SECOND", + "HERTZ", + "METER", + "RADIAN", + requries=["series"], + argstr="-unit %s", + position=5, + desc="select unit for series (default SECOND)", + ) + volume_all = traits.File( + exists=True, + argstr="-volume-all %s", + position=6, + desc="the input volume file for all voxel data", + ) + volume_all_from_cropped = traits.Bool( + requires=["volume_all"], + argstr="-from-cropped", + position=7, + desc="the input is cropped to the size of the voxel data in the template file", + ) + label_collision = traits.Enum( + "ERROR", + "SURFACES_FIRST", + "LEGACY", + argstr="-label-collision %s", + position=8, + desc="how to handle conflicts between label keys, use 'LEGACY' to match v1.4.2 " + "and earlier", + ) + cifti = InputMultiObject( + File(exists=True), + argstr="-cifti %s", + position=9, + desc="use input data from one or more CIFTI files", + ) + metric = InputMultiObject( + traits.Tuple(traits.Enum(VALID_STRUCTURES), File(exists=True)), + argstr="%s", + position=10, + desc="use input data from one or more metric files", + ) + label = InputMultiObject( + traits.Tuple(traits.Enum(VALID_STRUCTURES), File(exists=True)), + argstr="%s", + position=11, + desc="use input data from one or more surface label files", + ) + volume = InputMultiObject( + traits.Either( + traits.Tuple(traits.Enum(VALID_STRUCTURES), File(exists=True)), + traits.Tuple(traits.Enum(VALID_STRUCTURES), File(exists=True), traits.Bool()), + ), + argstr="%s", + position=12, + desc="use a volume file for a single volume structure's data", + ) + + +class CiftiCreateDenseFromTemplateOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="The output CIFTI file") + + +class CiftiCreateDenseFromTemplate(WBCommand): + """ + This command helps you make a new dscalar, dtseries, or dlabel cifti file + that matches the brainordinate space used in another cifti file. The + template file must have the desired brainordinate space in the mapping + along the column direction (for dtseries, dscalar, dlabel, and symmetric + dconn this is always the case). All input cifti files must have a brain + models mapping along column and use the same volume space and/or surface + vertex count as the template for structures that they contain. If any + input files contain label data, then input files with non-label data are + not allowed, and the -series option may not be used. + + Any structure that isn't covered by an input is filled with zeros or the + unlabeled key. 
+ + >>> from nibabies.interfaces import workbench as wb + >>> frmtpl = wb.CiftiCreateDenseFromTemplate() + >>> frmtpl.inputs.in_file = data_dir / "func.dtseries.nii" + >>> frmtpl.inputs.series = True + >>> frmtpl.inputs.series_step = 0.8 + >>> frmtpl.inputs.series_start = 0 + >>> frmtpl.cmdline + 'wb_command -cifti-create-dense-from-template .../func.dtseries.nii \ + template_func.dtseries.nii -series 0.8 0.0' + + >>> frmtpl.inputs.volume = [("OTHER", data_dir / 'functional.nii', True), \ + ("PUTAMEN_LEFT", data_dir / 'functional.nii')] + >>> frmtpl.cmdline + 'wb_command -cifti-create-dense-from-template .../func.dtseries.nii \ + template_func.dtseries.nii -series 0.8 0.0 \ + -volume OTHER .../functional.nii -from-cropped \ + -volume PUTAMEN_LEFT .../functional.nii' + """ + + input_spec = CiftiCreateDenseFromTemplateInputSpec + output_spec = CiftiCreateDenseFromTemplateOutputSpec + _cmd = "wb_command -cifti-create-dense-from-template" + + def _format_arg(self, name, trait_spec, value): + if name in ("metric", "label", "volume"): + cmds = [] + for val in value: + if val[-1] is True: # volume specific + val = val[:2] + ("-from-cropped",) + cmds.append(" ".join((f"-{name}",) + val)) + return trait_spec.argstr % " ".join(cmds) + return super()._format_arg(name, trait_spec, value) + + +class CiftiCreateDenseTimeseriesInputSpec(CommandLineInputSpec): + out_file = File( + value="out.dtseries.nii", + usedefault=True, + argstr="%s", + position=0, + desc="The output CIFTI file", + ) + volume_data = File( + exists=True, + argstr="-volume %s", + position=1, + requires=["volume_structure_labels"], + desc="volume file containing all voxel data for all volume structures", + ) + volume_structure_labels = File( + exists=True, + argstr="%s", + position=2, + requires=["volume_data"], + desc="label volume file containing labels for cifti structures", + ) + left_metric = File( + exists=True, + argstr="-left-metric %s", + position=3, + desc="metric file for left surface", + ) + roi_left = File( + exists=True, + argstr="-roi-left %s", + position=4, + requires=["left_metric"], + desc="ROI (as metric file) of vertices to use from left surface", + ) + right_metric = File( + exists=True, + argstr="-right-metric %s", + position=5, + desc="metric file for right surface", + ) + roi_right = File( + exists=True, + argstr="-roi-right %s", + position=6, + requires=["right_metric"], + desc="ROI (as metric file) of vertices to use from right surface", + ) + cerebellum_metric = File( + exists=True, + argstr="-cerebellum-metric %s", + position=7, + desc="metric file for cerebellum", + ) + roi_cerebellum = File( + exists=True, + argstr="-roi-cerebellum %s", + position=8, + requires=["cerebellum_metric"], + desc="ROI (as metric file) of vertices to use from cerebellum", + ) + timestart = traits.Float( + argstr="-timestart %g", + position=9, + desc="the time at the first frame, in seconds", + ) + timestep = traits.Float( + argstr="-timestep %g", + position=10, + desc="the timestep, in seconds", + ) + unit = traits.Enum( + "SECOND", + "HERTZ", + "METER", + "RADIAN", + argstr="-unit %s", + position=11, + desc="use a unit other than time", + ) + + +class CiftiCreateDenseTimeseriesOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="CIFTI dense timeseries file") + + +class CiftiCreateDenseTimeseries(WBCommand): + """ + Create a CIFTI dense timeseries. + + All input files must have the same number of columns/subvolumes. Only + the specified components will be in the output cifti. At least one + component must be specified. 
+ + See -volume-label-import and -volume-help for format details of label + volume files. The structure-label-volume should have some of the label + names from this list, all other label names will be ignored: + + CORTEX_LEFT + CORTEX_RIGHT + CEREBELLUM + ACCUMBENS_LEFT + ACCUMBENS_RIGHT + ALL_GREY_MATTER + ALL_WHITE_MATTER + AMYGDALA_LEFT + AMYGDALA_RIGHT + BRAIN_STEM + CAUDATE_LEFT + CAUDATE_RIGHT + CEREBELLAR_WHITE_MATTER_LEFT + CEREBELLAR_WHITE_MATTER_RIGHT + CEREBELLUM_LEFT + CEREBELLUM_RIGHT + CEREBRAL_WHITE_MATTER_LEFT + CEREBRAL_WHITE_MATTER_RIGHT + CORTEX + DIENCEPHALON_VENTRAL_LEFT + DIENCEPHALON_VENTRAL_RIGHT + HIPPOCAMPUS_LEFT + HIPPOCAMPUS_RIGHT + INVALID + OTHER + OTHER_GREY_MATTER + OTHER_WHITE_MATTER + PALLIDUM_LEFT + PALLIDUM_RIGHT + PUTAMEN_LEFT + PUTAMEN_RIGHT + THALAMUS_LEFT + THALAMUS_RIGHT + + >>> from nibabies.interfaces.workbench import CiftiCreateDenseTimeseries + >>> createdts = CiftiCreateDenseTimeseries() + >>> createdts.inputs.volume_data = data_dir /'functional.nii' + >>> createdts.inputs.volume_structure_labels = data_dir / 'atlas.nii' + >>> createdts.cmdline + 'wb_command -cifti-create-dense-timeseries out.dtseries.nii \ + -volume .../functional.nii .../atlas.nii' + """ + + input_spec = CiftiCreateDenseTimeseriesInputSpec + output_spec = CiftiCreateDenseTimeseriesOutputSpec + _cmd = "wb_command -cifti-create-dense-timeseries" + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs["out_file"] = os.path.abspath(self.inputs.out_file) + return outputs + + +class CiftiCreateLabelInputSpec(CommandLineInputSpec): + out_file = File( + value="out.dlabel.nii", + usedefault=True, + argstr="%s", + position=0, + desc="the output CIFTI file", + ) + volume_label = File( + exists=True, + requires=["structure_label_volume"], + argstr="-volume %s", + position=1, + desc="label volume file containing the data to be copied", + ) + structure_label_volume = File( + exists=True, + requires=["volume_label"], + argstr="%s", + position=2, + desc="label volume file that defines which voxels to use", + ) + left_label = File( + exists=True, + argstr="-left-label %s", + position=3, + desc="Label file for left surface", + ) + left_roi = File( + exists=True, + requires=["left_label"], + argstr="-roi-left %s", + position=4, + desc="roi of vertices to use from left surface as a metric file", + ) + right_label = File( + exists=True, + argstr="-right-label %s", + position=5, + desc="Label file for right surface", + ) + right_roi = File( + exists=True, + requires=["right_label"], + argstr="-roi-right %s", + position=6, + desc="roi of vertices to use from right surface as a metric file", + ) + cerebellum_label = File( + exists=True, + argstr="-cerebellum-label %s", + position=7, + desc="label for the cerebellum", + ) + cerebellum_roi = File( + exists=True, + requires=["cerebellum_label"], + argstr="-roi-cerebellum %s", + position=8, + desc="roi of vertices to use from cerebellum", + ) + + +class CiftiCreateLabelOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="the output CIFTI file") + + +class CiftiCreateLabel(WBCommand): + """ + All input files must have the same number of columns/subvolumes. Only + the specified components will be in the output cifti. At least one + component must be specified. + + The -volume option requires two volume arguments, the label-volume + argument contains all labels you want to display (e.g. 
nuclei of the + thalamus), whereas the structure-label-volume argument contains all CIFTI + voxel-based structures you want to include data within (e.g. + THALAMUS_LEFT, THALAMUS_RIGHT, etc). See -volume-label-import and + -volume-help for format details of label volume files. If you just want + the labels in voxels to be the structure names, you may use the same file + for both arguments. The structure-label-volume must use some of the + label names from this list, all other label names in the + structure-label-volume will be ignored: + + CORTEX_LEFT + CORTEX_RIGHT + CEREBELLUM + ACCUMBENS_LEFT + ACCUMBENS_RIGHT + ALL_GREY_MATTER + ALL_WHITE_MATTER + AMYGDALA_LEFT + AMYGDALA_RIGHT + BRAIN_STEM + CAUDATE_LEFT + CAUDATE_RIGHT + CEREBELLAR_WHITE_MATTER_LEFT + CEREBELLAR_WHITE_MATTER_RIGHT + CEREBELLUM_LEFT + CEREBELLUM_RIGHT + CEREBRAL_WHITE_MATTER_LEFT + CEREBRAL_WHITE_MATTER_RIGHT + CORTEX + DIENCEPHALON_VENTRAL_LEFT + DIENCEPHALON_VENTRAL_RIGHT + HIPPOCAMPUS_LEFT + HIPPOCAMPUS_RIGHT + INVALID + OTHER + OTHER_GREY_MATTER + OTHER_WHITE_MATTER + PALLIDUM_LEFT + PALLIDUM_RIGHT + PUTAMEN_LEFT + PUTAMEN_RIGHT + THALAMUS_LEFT + THALAMUS_RIGHT + + >>> from nibabies.interfaces import workbench as wb + >>> lab = wb.CiftiCreateLabel() + >>> lab.inputs.volume_label = data_dir / "functional.nii" + >>> lab.inputs.structure_label_volume = data_dir / "functional.nii" + >>> lab.cmdline + 'wb_command -cifti-create-label out.dlabel.nii -volume .../functional.nii .../functional.nii' + """ + + input_spec = CiftiCreateLabelInputSpec + output_spec = CiftiCreateLabelOutputSpec + _cmd = "wb_command -cifti-create-label" + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs["out_file"] = os.path.abspath(self.inputs.out_file) + return outputs + + +class CiftiDilateInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + mandatory=True, + argstr="%s", + position=0, + desc="The input CIFTI file", + ) + direction = traits.Enum( + "ROW", + "COLUMN", + mandatory=True, + argstr="%s", + position=1, + desc="Which dimension to dilate along, ROW or COLUMN", + ) + surface_distance = traits.Int( + mandatory=True, + argstr="%d", + position=2, + desc="The distance to dilate on surfaces, in mm", + ) + volume_distance = traits.Int( + mandatory=True, + argstr="%d", + position=3, + desc="The distance to dilate in the volume, in mm", + ) + out_file = File( + name_source=["in_file"], + name_template="dilated_%s.nii", + keep_extension=True, + argstr="%s", + position=4, + desc="The dilated CIFTI file", + ) + left_surface = File( + exists=True, + position=5, + argstr="-left-surface %s", + desc="Specify the left surface to use", + ) + left_corrected_areas = File( + exists=True, + position=6, + requires=["left_surface"], + argstr="-left-corrected-areas %s", + desc="vertex areas (as a metric) to use instead of computing them from the left surface.", + ) + right_surface = File( + exists=True, + position=7, + argstr="-right-surface %s", + desc="Specify the right surface to use", + ) + right_corrected_areas = File( + exists=True, + position=8, + requires=["right_surface"], + argstr="-right-corrected-areas %s", + desc="vertex areas (as a metric) to use instead of computing them from the right surface", + ) + cerebellum_surface = File( + exists=True, + position=9, + argstr="-cerebellum-surface %s", + desc="specify the cerebellum surface to use", + ) + cerebellum_corrected_areas = File( + exists=True, + position=10, + requires=["cerebellum_surface"], + argstr="-cerebellum-corrected-areas %s", + desc="vertex areas 
(as a metric) to use instead of computing them from the cerebellum " + "surface", + ) + bad_brainordinate_roi = File( + exists=True, + position=11, + argstr="-bad-brainordinate-roi %s", + desc="CIFTI dscalar or dtseries file, positive values denote brainordinates to have their " + "values replaced", + ) + nearest = traits.Bool( + position=12, + argstr="-nearest", + desc="Use nearest good value instead of a weighted average", + ) + merged_volume = traits.Bool( + position=13, + argstr="-merged-volume", + desc="treat volume components as if they were a single component", + ) + legacy_mode = traits.Bool( + position=14, + argstr="-legacy-mode", + desc="Use the math from v1.3.2 and earlier for weighted dilation", + ) + + +class CiftiDilateOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="Dilated CIFTI file") + + +class CiftiDilate(WBCommand): + """ + Dilate a CIFTI file. + + For all data values designated as bad, if they neighbor a good value or + are within the specified distance of a good value in the same kind of + model, replace the value with a distance weighted average of nearby good + values, otherwise set the value to zero. If -nearest is specified, it + will use the value from the closest good value within range instead of a + weighted average. When the input file contains label data, nearest + dilation is used on the surface, and weighted popularity is used in the + volume. + + The -*-corrected-areas options are intended for dilating on group average + surfaces, but it is only an approximate correction for the reduction of + structure in a group average surface. + + If -bad-brainordinate-roi is specified, all values, including those with + value zero, are good, except for locations with a positive value in the + ROI. If it is not specified, only values equal to zero are bad. 
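+    A minimal usage sketch, assuming the same ``data_dir`` doctest fixture used by
+    the other interfaces in this module; the distance values are illustrative and
+    no command line is asserted here:
+
+    >>> from nibabies.interfaces import workbench as wb
+    >>> dilate = wb.CiftiDilate()
+    >>> dilate.inputs.in_file = data_dir / "func.dtseries.nii"
+    >>> dilate.inputs.direction = "COLUMN"
+    >>> dilate.inputs.surface_distance = 10
+    >>> dilate.inputs.volume_distance = 10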
+ + """ + + input_spec = CiftiDilateInputSpec + output_spec = CiftiDilateOutputSpec + _cmd = "wb_command -cifti-dilate" + + +class CiftiResampleInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + mandatory=True, + argstr="%s", + position=0, + desc="the CIFTI file to resample", + ) + direction = traits.Enum( + "ROW", + "COLUMN", + mandatory=True, + argstr="%s", + position=1, + desc="the direction of the input that should be resampled", + ) + template = File( + exists=True, + mandatory=True, + argstr="%s", + position=2, + desc="a CIFTI file containing the CIFTI space to resample to", + ) + template_direction = traits.Enum( + "ROW", + "COLUMN", + mandatory=True, + argstr="%s", + position=3, + desc="the direction of the template to use as the resampling space", + ) + surface_method = traits.Enum( + "ADAP_BARY_AREA", + "BARYCENTRIC", + mandatory=True, + argstr="%s", + position=4, + desc="surface resampling method", + ) + volume_method = traits.Enum( + "CUBIC", + "ENCLOSING_VOXEL", + "TRILINEAR", + mandatory=True, + argstr="%s", + position=5, + desc="volume interpolation method", + ) + out_file = File( + name_source=["in_file"], + keep_extension=True, + name_template="resampled_%s.nii", + argstr="%s", + position=6, + desc="the output cifti file", + ) + surface_largest = traits.Bool( + argstr="-surface-largest", + position=7, + desc="use largest weight instead of weighted average or popularity when doing " + "surface resampling", + ) + volume_predilate = traits.Int( + argstr="-volume-predilate %d", + position=8, + desc="distance, in mm, to dilate the volume components before resampling", + ) + volume_predilate_nearest = traits.Bool( + requires=["volume_predilate"], + xor=["volume_predilate_weighted"], + argstr="-nearest", + position=9, + desc="use nearest value dilation", + ) + volume_predilate_weighted = traits.Bool( + requires=["volume_predilate"], + xor=["volume_predilate_nearest"], + argstr="-weighted", + position=10, + desc="use weighted dilation (default)", + ) + volume_predilate_weighted_exponent = traits.Int( + requires=["volume_predilate_weighted"], + argstr="-exponent %d", + position=11, + desc="exponent 'n' to use in (1 / (distance ^ n)) as the weighting function (default 7)", + ) + volume_predilate_weighted_legacy = traits.Bool( + requires=["volume_predilate_weighted"], + argstr="-legacy-cutoff", + position=12, + desc="use v1.3.2 logic for the kernel cutoff", + ) + surface_postdilate = traits.Int( + argstr="-surface-postdilate %d", + position=13, + desc="distance, in mm, to dilate the surface components after resampling", + ) + surface_postdilate_nearest = traits.Bool( + requires=["surface_postdilate"], + xor=["surface_postdilate_weighted", "surface_postdilate_linear"], + argstr="-nearest", + position=14, + desc="use nearest value dilation", + ) + surface_postdilate_linear = traits.Bool( + requires=["surface_postdilate"], + xor=["surface_postdilate_weighted", "surface_postdilate_nearest"], + argstr="-linear", + position=15, + desc="use linear dilation", + ) + surface_postdilate_weighted = traits.Bool( + requires=["surface_postdilate"], + xor=["surface_postdilate_nearest", "surface_postdilate_linear"], + argstr="-weighted", + position=16, + desc="use weighted dilation (default for non-label data)", + ) + surface_postdilate_weighted_exponent = traits.Int( + requires=["surface_postdilate_weighted"], + argstr="-exponent %d", + position=17, + desc="exponent 'n' to use in (area / (distance ^ n)) as the weighting function " + "(default 6)", + ) + 
surface_postdilate_weighted_legacy = traits.Bool( + requires=["surface_postdilate_weighted"], + argstr="-legacy-cutoff", + position=18, + desc="use v1.3.2 logic for the kernel cutoff", + ) + affine = File( + exists=True, + argstr="-affine %s", + position=19, + desc="affine file for transformation on the volume components", + ) + affine_flirt_source = File( + exists=True, + requires=["affine", "affine_flirt_target"], + argstr="-flirt %s", + position=20, + desc="the source volume used when generating the affine. MUST be used if affine is " + "a flirt affine", + ) + affine_flirt_target = File( + exists=True, + requires=["affine", "affine_flirt_source"], + argstr="%s", + position=21, + desc="the target volume used when generating the affine. MUST be used if affine is " + "a flirt affine", + ) + warpfield = File( + exists=True, + argstr="-warpfield %s", + position=22, + desc="the warpfield to use on the volume components", + ) + warpfield_fnirt_source = File( + exists=True, + requires=["warpfield"], + argstr="-fnirt %s", + position=23, + desc="the source volume used when generating the warpfield. MUST be used if using " + "a fnirt warpfield", + ) + left_sphere_current = File( + exists=True, + requires=["left_sphere_new"], + argstr="-left-spheres %s", + position=24, + desc="a sphere with the same mesh as the current left surface", + ) + left_sphere_new = File( + exists=True, + requires=["left_sphere_current"], + argstr="%s", + position=25, + desc="a sphere with the new left mesh that is in register with the current sphere", + ) + left_area_surf_current = File( + exists=True, + requires=["left_sphere_current", "left_area_surf_new"], + argstr="-left-area-surfs %s", + position=26, + desc="a relevant left anatomical surface with current mesh", + ) + left_area_surf_new = File( + exists=True, + requires=["left_sphere_new", "left_area_surf_current"], + argstr="%s", + position=27, + desc="a relevant left anatomical surface with new mesh", + ) + left_area_metric_current = File( + exists=True, + requires=["left_sphere_current", "left_area_metric_new"], + argstr="-left-area-metrics %s", + position=28, + desc="a metric file with vertex areas for the current mesh", + ) + left_area_metric_new = File( + exists=True, + requires=["left_sphere_new", "left_area_metric_current"], + argstr="%s", + position=29, + desc="a metric file with vertex areas for the new mesh", + ) + right_sphere_current = File( + exists=True, + requires=["right_sphere_new"], + argstr="-right-spheres %s", + position=30, + desc="a sphere with the same mesh as the current right surface", + ) + right_sphere_new = File( + exists=True, + requires=["right_sphere_current"], + argstr="%s", + position=31, + desc="a sphere with the new right mesh that is in register with the current sphere", + ) + right_area_surf_current = File( + exists=True, + requires=["right_sphere_current", "right_area_surf_new"], + argstr="-right-area-surfs %s", + position=32, + desc="a relevant right anatomical surface with current mesh", + ) + right_area_surf_new = File( + exists=True, + requires=["right_sphere_new", "right_area_surf_current"], + argstr="%s", + position=33, + desc="a relevant right anatomical surface with new mesh", + ) + right_area_metric_current = File( + exists=True, + requires=["right_sphere_current", "right_area_metric_new"], + argstr="-right-area-metrics %s", + position=34, + desc="a metric file with vertex areas for the current mesh", + ) + right_area_metric_new = File( + exists=True, + requires=["right_sphere_new", "right_area_metric_current"], + 
argstr="%s", + position=35, + desc="a metric file with vertex areas for the new mesh", + ) + cerebellum_sphere_current = File( + exists=True, + requires=["cerebellum_sphere_new"], + argstr="-cerebellum-spheres %s", + position=36, + desc="a sphere with the same mesh as the current cerebellum surface", + ) + cerebellum_sphere_new = File( + exists=True, + requires=["cerebellum_sphere_current"], + argstr="%s", + position=37, + desc="a sphere with the new cerebellum mesh that is in register with the current sphere", + ) + cerebellum_area_surf_current = File( + exists=True, + requires=["cerebellum_sphere_current", "cerebellum_area_surf_new"], + argstr="-cerebellum-area-surfs %s", + position=38, + desc="a relevant cerebellum anatomical surface with current mesh", + ) + cerebellum_area_surf_new = File( + exists=True, + requires=["cerebellum_sphere_new", "cerebellum_area_surf_current"], + argstr="%s", + position=39, + desc="a relevant cerebellum anatomical surface with new mesh", + ) + cerebellum_area_metric_current = File( + exists=True, + requires=["cerebellum_sphere_current", "cerebellum_area_metric_new"], + argstr="-cerebellum-area-metrics %s", + position=40, + desc="a metric file with vertex areas for the current mesh", + ) + cerebellum_area_metric_new = File( + exists=True, + requires=["cerebellum_sphere_new", "cerebellum_area_metric_current"], + argstr="%s", + position=41, + desc="a metric file with vertex areas for the new mesh", + ) + + +class CiftiResampleOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="the resampled CIFTI file") + + +class CiftiResample(WBCommand): + """ + Resample cifti data to a different brainordinate space. Use COLUMN for + the direction to resample dscalar, dlabel, or dtseries. Resampling both + dimensions of a dconn requires running this command twice, once with + COLUMN and once with ROW. If you are resampling a dconn and your machine + has a large amount of memory, you might consider using + -cifti-resample-dconn-memory to avoid writing and rereading an + intermediate file. The argument should usually be + COLUMN, as dtseries, dscalar, and dlabel all have brainordinates on that + direction. If spheres are not specified for a surface structure which + exists in the cifti files, its data is copied without resampling or + dilation. Dilation is done with the 'nearest' method, and is done on + for surface data. Volume components are padded before + dilation so that dilation doesn't run into the edge of the component + bounding box. If neither -affine nor -warpfield are specified, the + identity transform is assumed for the volume data. + + The recommended resampling methods are ADAP_BARY_AREA and CUBIC (cubic + spline), except for label data which should use ADAP_BARY_AREA and + ENCLOSING_VOXEL. Using ADAP_BARY_AREA requires specifying an area option + to each used -*-spheres option. 
+ + >>> from nibabies.interfaces import workbench as wb + >>> res = wb.CiftiResample() + >>> res.inputs.in_file = data_dir / "func.dtseries.nii" + >>> res.inputs.direction = "COLUMN" + >>> res.inputs.template = data_dir / "func.dlabel.nii" + >>> res.inputs.template_direction = "COLUMN" + >>> res.inputs.surface_method = "ADAP_BARY_AREA" + >>> res.inputs.volume_method = "CUBIC" + >>> res.inputs.out_file = "resampled.dtseries.nii" + >>> res.inputs.volume_predilate = 10 + >>> res.cmdline + 'wb_command -cifti-resample .../func.dtseries.nii COLUMN .../func.dlabel.nii COLUMN \ + ADAP_BARY_AREA CUBIC resampled.dtseries.nii -volume-predilate 10' + """ + + input_spec = CiftiResampleInputSpec + output_spec = CiftiResampleOutputSpec + _cmd = "wb_command -cifti-resample" + + +class CiftiSeparateInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + mandatory=True, + argstr="%s", + position=0, + desc="the cifti to separate a component of", + ) + direction = traits.Enum( + "ROW", + "COLUMN", + mandatory=True, + argstr="%s", + position=1, + desc="which dimension to smooth along, ROW or COLUMN", + ) + volume_all_file = File( + argstr="-volume-all %s", + position=2, + desc="separate all volume structures into a volume file", + ) + volume_all_roi_file = File( + argstr="-roi %s", + position=3, + requires=["volume_all_file"], + desc="output the roi of which voxels have data", + ) + volume_all_label_file = File( + argstr="-label %s", + position=4, + requires=["volume_all_file"], + desc="output a volume label file indicating the location of structures", + ) + volume_all_crop = traits.Bool( + argstr="-crop", + position=5, + requires=["volume_all_file"], + desc="crop volume to the size of the data rather than using the original volume size", + ) + # the following can be repeated + label = InputMultiObject( + traits.Either( + traits.Tuple(traits.Enum(VALID_STRUCTURES), File()), + traits.Tuple(traits.Enum(VALID_STRUCTURES), File(), File()), + ), + argstr="%s", + position=6, + desc="separate one or more surface models into a surface label file", + ) + metric = InputMultiObject( + traits.Either( + traits.Tuple(traits.Enum(VALID_STRUCTURES), File()), + traits.Tuple(traits.Enum(VALID_STRUCTURES), File(), File()), # -roi + ), + argstr="%s", + position=7, + desc="separate one or more surface models into a metric file", + ) + volume = InputMultiObject( + traits.Either( + traits.Tuple(traits.Enum(VALID_STRUCTURES), File()), + traits.Tuple(traits.Enum(VALID_STRUCTURES), File(), File()), # -roi + traits.Tuple(traits.Enum(VALID_STRUCTURES, File(), traits.Bool)), # -crop + traits.Tuple(traits.Enum(VALID_STRUCTURES), File(), File(), traits.Bool), # -roi -crop + ), + argstr="%s", + position=8, + desc="separate one or more volume structures into a volume file", + ) + + +class CiftiSeparateOutputSpec(TraitedSpec): + volume_all_file = File(desc="File containing all volume structures") + volume_all_roi_file = File(desc="Output the roi of which voxels have data") + volume_all_label_file = File( + desc="output a volume label file indicating the location of structures" + ) + label_files = OutputMultiObject(File(), desc="Output label files") + label_roi_files = OutputMultiObject(File(), desc="Output label rois files") + metric_files = OutputMultiObject(File(), desc="Output metric files") + metric_roi_files = OutputMultiObject(File(), desc="Output metric rois files") + volume_files = OutputMultiObject(File(), desc="Output volume files") + volume_roi_files = OutputMultiObject(File(), desc="Output volume roi files") + + +class 
CiftiSeparate(WBCommand): + """ + Extract left or right hemisphere surface from CIFTI file (.dtseries) + other structure can also be extracted + The input cifti file must have a brain models mapping on the chosen + dimension, columns for .dtseries. + + >>> separate = CiftiSeparate() + >>> separate.inputs.in_file = data_dir / "func.dtseries.nii" + >>> separate.inputs.direction = "COLUMN" + >>> separate.inputs.volume_all_file = "volume_all.nii.gz" + >>> separate.cmdline + 'wb_command -cifti-separate .../func.dtseries.nii COLUMN \ + -volume-all volume_all.nii.gz' + + Metrics, labels, and volumes can also be freely extracted + >>> separate.inputs.metric = [("CORTEX_LEFT", "cortexleft.func.gii")] + >>> separate.inputs.volume = [("HIPPOCAMPUS_LEFT", "hippoL.nii.gz"), \ + ("HIPPOCAMPUS_RIGHT", "hippoR.nii.gz", "hippoR.roi.nii.gz")] + >>> separate.cmdline + 'wb_command -cifti-separate .../func.dtseries.nii COLUMN \ + -volume-all volume_all.nii.gz -metric CORTEX_LEFT cortexleft.func.gii \ + -volume HIPPOCAMPUS_LEFT hippoL.nii.gz \ + -volume HIPPOCAMPUS_RIGHT hippoR.nii.gz -roi hippoR.roi.nii.gz' + + """ + + input_spec = CiftiSeparateInputSpec + output_spec = CiftiSeparateOutputSpec + _cmd = "wb_command -cifti-separate" + _label_roi_files = [] + _metric_roi_files = [] + _volume_roi_files = [] + + def _format_arg(self, name, trait_spec, value): + if name in ("label", "metric", "volume"): + cmds = [] + for i, val in enumerate(value): + if len(val) == 3: + if val[-1] is True: + val = val[:-1] + ("-crop",) + else: + val = val[:-1] + ("-roi", val[-1]) + self._set_roi_file(name, val[-1]) + elif len(val) == 4: + val = val[:-2] + ("-roi", val[-2]) + ("crop") if val[-1] is True else () + self._set_roi_file(name, val[-2]) + cmds.append(" ".join((f"-{name}",) + val)) + return trait_spec.argstr % " ".join(cmds) + return super()._format_arg(name, trait_spec, value) + + def _list_outputs(self): + outputs = self.output_spec().get() + if self.inputs.volume_all_file: + outputs["volume_all_file"] = os.path.abspath(self.inputs.volume_all_file) + if self.inputs.volume_all_roi_file: + outputs["volume_all_roi_file"] = os.path.abspath(self.inputs.volume_all_roi_file) + if self.inputs.volume_all_label_file: + outputs["volume_all_label_file"] = os.path.abspath(self.inputs.volume_all_label_file) + if self.inputs.label: + for label in self.inputs.label: + outputs["label_files"] = (outputs["label_files"] or []) + self._gen_filename( + label[2] + ) + if self._label_roi_files: + outputs["label_roi_files"] = self._label_roi_files + if self.inputs.metric: + for metric in self.inputs.metric: + outputs["metric_files"] = (outputs["metric_files"] or []) + self._gen_filename( + metric[2] + ) + if self._metric_roi_files: + outputs["metric_roi_files"] = self._metric_roi_files + if self.inputs.volume: + for volume in self.inputs.volume: + outputs["volume_files"] = (outputs["volume_files"] or []) + self._gen_filename( + volume[2] + ) + if self._volume_roi_files: + outputs["volume_roi_files"] = self._volume_roi_files + return outputs + + def _set_roi_file(self, name, file): + rois = getattr(self, f"_{name}_roi_files") + rois.append(self._gen_filename(file)) + + +class CiftiSmoothInputSpec(_CiftiSmoothInputSpec): + left_surf = File( + exists=True, + position=5, + argstr="-left-surface %s", + desc="Specify the left surface to use", + ) + right_surf = File( + exists=True, + position=7, + argstr="-right-surface %s", + desc="Specify the right surface to use", + ) + + +class CiftiSmooth(_CiftiSmooth): + input_spec = CiftiSmoothInputSpec + + 
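+# Note: ``CiftiSmoothInputSpec`` above only redefines ``left_surf``/``right_surf``;
+# the likely intent (an assumption, not stated in the patch) is to drop nipype's
+# ``mandatory=True`` on those traits so smoothing can be configured even when a
+# hemisphere surface is not available.
+
+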
+class VolumeAffineResampleInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + mandatory=True, + argstr="%s", + position=0, + desc="volume to resample", + ) + volume_space = File( + exists=True, + mandatory=True, + argstr="%s", + position=1, + desc="a volume file in the volume space you want for the output", + ) + method = traits.Enum( + "CUBIC", + "ENCLOSING_VOXEL", + "TRILINEAR", + mandatory=True, + argstr="%s", + position=2, + desc="The resampling method. The recommended methods are CUBIC " + "(cubic spline) for most data, and ENCLOSING_VOXEL for label data.", + ) + out_file = File( + name_source=["in_file"], + name_template="resampled_%s.nii.gz", + keep_extension=True, + argstr="%s", + position=3, + desc="the output volume", + ) + affine = File( + exists=True, + mandatory=True, + argstr="-affine %s", + position=4, + desc="the affine file to apply", + ) + flirt = traits.Bool( + argstr="-flirt %s %s", + position=5, + desc="Set ``True`` if ``affine`` is a FLIRT affine.", + ) + flirt_source_volume = File( + exists=True, + desc="the source volume used when generating the affine; defaults to in_file", + requires=["flirt"], + ) + flirt_target_volume = File( + exists=True, + desc="the target volume used when generating the affine; defaults to volume_space", + requires=["flirt"], + ) + + +class VolumeAffineResampleOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="the output volume") + + +class VolumeAffineResample(WBCommand): + """ + Resample a volume file with an affine transformation. + + >>> from nibabies.interfaces.workbench import VolumeAffineResample + >>> resample = VolumeAffineResample() + >>> resample.inputs.in_file = data_dir /'functional.nii' + >>> resample.inputs.volume_space = data_dir /'anatomical.nii' + >>> resample.inputs.method = 'CUBIC' + >>> resample.inputs.affine = data_dir / 'func_to_struct.mat' + >>> resample.cmdline + 'wb_command -volume-resample .../functional.nii .../anatomical.nii CUBIC \ + resampled_functional.nii.gz -affine .../func_to_struct.mat' + + If the affine was generated with FLIRT, this should be indicated. + By default, the interface will use the ``in_file`` and ``volume_space`` + for references. 
+ + >>> resample.inputs.flirt = True + >>> resample.cmdline + 'wb_command -volume-resample .../functional.nii .../anatomical.nii CUBIC \ + resampled_functional.nii.gz -affine .../func_to_struct.mat \ + -flirt .../functional.nii .../anatomical.nii' + + However, if other volumes were used to calculate the affine, they can + be provided: + + >>> resample.inputs.flirt_source_volume = data_dir / 'epi.nii' + >>> resample.inputs.flirt_target_volume = data_dir /'T1w.nii' + >>> resample.cmdline + 'wb_command -volume-resample .../functional.nii .../anatomical.nii CUBIC \ + resampled_functional.nii.gz -affine .../func_to_struct.mat \ + -flirt .../epi.nii .../T1w.nii' + """ + + input_spec = VolumeAffineResampleInputSpec + output_spec = VolumeAffineResampleOutputSpec + _cmd = "wb_command -volume-resample" + + def _format_arg(self, opt, spec, val): + if opt == "flirt" and val: + val = ( + self.inputs.flirt_source_volume or self.inputs.in_file, + self.inputs.flirt_target_volume or self.inputs.volume_space, + ) + return super()._format_arg(opt, spec, val) + + +class VolumeAllLabelsToROIsInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + mandatory=True, + argstr="%s", + position=0, + desc="the input volume label file", + ) + label_map = traits.Either( + traits.Int, + Str, + mandatory=True, + argstr="%s", + position=1, + desc="the number or name of the label map to use", + ) + out_file = File( + name_source=["in_file"], + name_template="%s_rois.nii.gz", + argstr="%s", + position=2, + desc="the output volume", + ) + + +class VolumeAllLabelsToROIsOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="the output volume") + + +class VolumeAllLabelsToROIs(WBCommand): + """ + Make ROIs from all labels in a volume frame + + The output volume has a frame for each label in the specified input + frame, other than the ??? label, each of which contains an ROI of all + voxels that are set to the corresponding label. + + >>> from nibabies.interfaces.workbench import VolumeAllLabelsToROIs + >>> rois = VolumeAllLabelsToROIs() + >>> rois.inputs.in_file = data_dir / 'atlas.nii' + >>> rois.inputs.label_map = 1 + >>> rois.cmdline + 'wb_command -volume-all-labels-to-rois .../atlas.nii 1 atlas_rois.nii.gz' + """ + + input_spec = VolumeAllLabelsToROIsInputSpec + output_spec = VolumeAllLabelsToROIsOutputSpec + _cmd = "wb_command -volume-all-labels-to-rois" + + +class VolumeLabelExportTableInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + mandatory=True, + argstr="%s", + position=0, + desc="the input volume label file", + ) + label_map = traits.Either( + traits.Int, + Str, + mandatory=True, + argstr="%s", + position=1, + desc="the number or name of the label map to use", + ) + out_file = File( + name_source=["in_file"], + name_template="%s_labels.txt", + argstr="%s", + position=2, + desc="the output text file", + ) + + +class VolumeLabelExportTableOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="the output text file") + + +class VolumeLabelExportTable(WBCommand): + """ + Export label table from volume as text + + Takes the label table from the volume label map, and writes it to a text + format matching what is expected by -volume-label-import. 
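Because the exported table matches what ``-volume-label-import`` expects, this interface composes naturally with ``VolumeLabelImport``, defined further below. A hedged round-trip sketch (file names are made up; ``wb_command`` is required):

    from nibabies.interfaces.workbench import VolumeLabelExportTable, VolumeLabelImport

    # dump the label table of map 1 to a text file ...
    export = VolumeLabelExportTable(in_file="atlas.nii", label_map=1)
    table_txt = export.run().outputs.out_file

    # ... and re-import it onto another integer-valued volume
    imported = VolumeLabelImport(in_file="parcellation.nii.gz", label_list_file=table_txt).run()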
+
+    >>> from nibabies.interfaces.workbench import VolumeLabelExportTable
+    >>> label_export = VolumeLabelExportTable()
+    >>> label_export.inputs.in_file = data_dir / 'atlas.nii'
+    >>> label_export.inputs.label_map = 1
+    >>> label_export.cmdline
+    'wb_command -volume-label-export-table .../atlas.nii 1 atlas_labels.txt'
+    """
+
+    input_spec = VolumeLabelExportTableInputSpec
+    output_spec = VolumeLabelExportTableOutputSpec
+    _cmd = "wb_command -volume-label-export-table"
+
+
+class VolumeLabelImportInputSpec(CommandLineInputSpec):
+    in_file = File(
+        exists=True,
+        mandatory=True,
+        argstr="%s",
+        position=0,
+        desc="the input volume file",
+    )
+    label_list_file = File(
+        exists=True,
+        mandatory=True,
+        argstr="%s",
+        position=1,
+        desc="text file containing the values and names for labels",
+    )
+    out_file = File(
+        name_source=["in_file"],
+        name_template="%s_labels.nii.gz",
+        argstr="%s",
+        position=2,
+        desc="the output workbench label volume",
+    )
+    discard_others = traits.Bool(
+        argstr="-discard-others",
+        desc="set any voxels with values not mentioned in the label list to the ??? label",
+    )
+    unlabeled_values = traits.Int(
+        argstr="-unlabeled-value %d",
+        desc="the value that will be interpreted as unlabeled",
+    )
+    subvolume = traits.Either(
+        traits.Int,
+        Str,
+        argstr="-subvolume %s",
+        desc="select a single subvolume to import (number or name)",
+    )
+    drop_unused_labels = traits.Bool(
+        argstr="-drop-unused-labels",
+        desc="remove any unused label values from the label table",
+    )
+
+
+class VolumeLabelImportOutputSpec(TraitedSpec):
+    out_file = File(exists=True, desc="the output workbench label volume")
+
+
+class VolumeLabelImport(WBCommand):
+    """
+    Import a label volume to workbench format
+
+    Creates a label volume from an integer-valued volume file. The label
+    name and color information is stored in the volume header in a nifti
+    extension, with a similar format as in caret5, see -volume-help. You may
+    specify the empty string (use "") for <label-list-file>, which will be
+    treated as if it is an empty file. The label list file must have the
+    following format (2 lines per label):
+
+    <labelname>
+    <key> <red> <green> <blue> <alpha>
+    ...
+
+    Label names are specified on a separate line from their value and color,
+    in order to let label names contain spaces. Whitespace is trimmed from
+    both ends of the label name, but is kept if it is in the middle of a
+    label. Do not specify the "unlabeled" key in the file, it is assumed
+    that 0 means not labeled unless -unlabeled-value is specified. The value
+    of <key> specifies what value in the imported file should be used as this
+    label. The values of <red>, <green>, <blue> and <alpha> must be integers
+    from 0 to 255, and will specify the color the label is drawn as (alpha of
+    255 means fully opaque, which is probably what you want).
+
+    By default, it will create new label names with names like LABEL_5 for
+    any values encountered that are not mentioned in the list file, specify
+    -discard-others to instead set these values to the "unlabeled" key.
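For concreteness, a label list file in the two-line-per-label format described above could look like this (names, keys, and colors are invented for illustration):

    Left-Hippocampus
    17 220 60 20 255
    Right-Hippocampus
    53 20 60 220 255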
+
+    >>> from nibabies.interfaces.workbench import VolumeLabelImport
+    >>> label_import = VolumeLabelImport()
+    >>> label_import.inputs.in_file = data_dir / 'atlas.nii'
+    >>> label_import.inputs.label_list_file = data_dir / 'label_list.txt'
+    >>> label_import.cmdline
+    'wb_command -volume-label-import .../atlas.nii .../label_list.txt \
+    atlas_labels.nii.gz'
+    """
+
+    input_spec = VolumeLabelImportInputSpec
+    output_spec = VolumeLabelImportOutputSpec
+    _cmd = "wb_command -volume-label-import"
+
+
+class CreateSignedDistanceVolumeInputSpec(CommandLineInputSpec):
+    surf_file = File(
+        exists=True,
+        mandatory=True,
+        argstr="%s",
+        position=0,
+        desc="Input surface GIFTI file (.surf.gii)",
+    )
+    ref_file = File(
+        exists=True,
+        mandatory=True,
+        argstr="%s",
+        position=1,
+        desc="NIfTI volume in the desired output space (dims, spacing, origin)",
+    )
+    out_file = File(
+        name_source=["surf_file"],
+        name_template="%s_distvol.nii.gz",
+        argstr="%s",
+        position=2,
+        desc="Name of output volume containing signed distances",
+    )
+    out_mask = File(
+        name_source=["surf_file"],
+        name_template="%s_distmask.nii.gz",
+        argstr="-roi-out %s",
+        desc="Name of file to store a mask where the ``out_file`` has a computed value",
+    )
+    fill_value = traits.Float(
+        0.0,
+        mandatory=False,
+        usedefault=True,
+        argstr="-fill-value %f",
+        desc="value to put in all voxels that don't get assigned a distance",
+    )
+    exact_limit = traits.Float(
+        5.0,
+        usedefault=True,
+        argstr="-exact-limit %f",
+        desc="distance for exact output in mm",
+    )
+    approx_limit = traits.Float(
+        20.0,
+        usedefault=True,
+        argstr="-approx-limit %f",
+        desc="distance for approximate output in mm",
+    )
+    approx_neighborhood = traits.Int(
+        2,
+        usedefault=True,
+        argstr="-approx-neighborhood %d",
+        desc="size of neighborhood cube measured from center to face in voxels, default 2 = 5x5x5",
+    )
+    winding_method = traits.Enum(
+        "EVEN_ODD",
+        "NEGATIVE",
+        "NONZERO",
+        "NORMALS",
+        argstr="-winding %s",
+        usedefault=True,
+        desc="winding method for point inside surface test",
+    )
+
+
+class CreateSignedDistanceVolumeOutputSpec(TraitedSpec):
+    out_file = File(desc="Name of output volume containing signed distances")
+    out_mask = File(
+        desc="Name of file to store a mask where the ``out_file`` has a computed value"
+    )
+
+
+class CreateSignedDistanceVolume(WBCommand):
+    """CREATE SIGNED DISTANCE VOLUME FROM SURFACE
+
+    wb_command -create-signed-distance-volume
+        <surface> - the input surface
+        <refspace> - a volume in the desired output space (dims, spacing, origin)
+        <outvol> - output - the output volume
+
+        [-roi-out] - output an roi volume of where the output has a computed
+            value
+            <roi-vol> - output - the output roi volume
+
+        [-fill-value] - specify a value to put in all voxels that don't get
+            assigned a distance
+            <value> - value to fill with (default 0)
+
+        [-exact-limit] - specify distance for exact output
+            <dist> - distance in mm (default 5)
+
+        [-approx-limit] - specify distance for approximate output
+            <dist> - distance in mm (default 20)
+
+        [-approx-neighborhood] - voxel neighborhood for approximate calculation
+            <num> - size of neighborhood cube measured from center to face, in
+            voxels (default 2 = 5x5x5)
+
+        [-winding] - winding method for point inside surface test
+            <method> - name of the method (default EVEN_ODD)
+
+    Computes the signed distance function of the surface. Exact distance is
+    calculated by finding the closest point on any surface triangle to the
+    center of the voxel.
Approximate distance is calculated starting with + these distances, using dijkstra's method with a neighborhood of voxels. + Specifying too small of an exact distance may produce unexpected results. + Valid specifiers for winding methods are as follows: + + EVEN_ODD (default) + NEGATIVE + NONZERO + NORMALS + + The NORMALS method uses the normals of triangles and edges, or the + closest triangle hit by a ray from the point. This method may be + slightly faster, but is only reliable for a closed surface that does not + cross through itself. All other methods count entry (positive) and exit + (negative) crossings of a vertical ray from the point, then counts as + inside if the total is odd, negative, or nonzero, respectively. + + """ + + input_spec = CreateSignedDistanceVolumeInputSpec + output_spec = CreateSignedDistanceVolumeOutputSpec + _cmd = "wb_command -create-signed-distance-volume" From 97e2b79eb272b4994e5c40411dc7a4dacade05e6 Mon Sep 17 00:00:00 2001 From: madisoth Date: Thu, 15 Dec 2022 12:41:15 -0600 Subject: [PATCH 14/14] refactor to handle anat_ribbon_wf moving to sMRIPrep --- fmriprep/workflows/base.py | 2 +- fmriprep/workflows/bold/base.py | 4 +- fmriprep/workflows/bold/resampling.py | 717 +++++++++----------------- 3 files changed, 254 insertions(+), 469 deletions(-) diff --git a/fmriprep/workflows/base.py b/fmriprep/workflows/base.py index 5bfc9db8f..f74fbe782 100644 --- a/fmriprep/workflows/base.py +++ b/fmriprep/workflows/base.py @@ -390,7 +390,7 @@ def init_single_subject_wf(subject_id): # Undefined if --fs-no-reconall, but this is safe ('outputnode.subjects_dir', 'inputnode.subjects_dir'), ('outputnode.subject_id', 'inputnode.subject_id'), - ('outputnode.surfaces', 'inputnode.anat_giftis'), + ('outputnode.anat_ribbon', 'inputnode.anat_ribbon'), ('outputnode.t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm'), ('outputnode.fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm')]), ]) diff --git a/fmriprep/workflows/bold/base.py b/fmriprep/workflows/bold/base.py index 1c7762a8f..e47ebcb31 100644 --- a/fmriprep/workflows/bold/base.py +++ b/fmriprep/workflows/bold/base.py @@ -351,7 +351,7 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False): "anat2std_xfm", "std2anat_xfm", "template", - "anat_giftis", + "anat_ribbon", "t1w2fsnative_xfm", "fsnative2t1w_xfm", "fmap", @@ -956,7 +956,7 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False): ("subjects_dir", "inputnode.subjects_dir"), ("subject_id", "inputnode.subject_id"), ("t1w2fsnative_xfm", "inputnode.t1w2fsnative_xfm"), - ("anat_giftis", "inputnode.anat_giftis"), + ("anat_ribbon", "inputnode.anat_ribbon"), ("t1w_mask", "inputnode.t1w_mask"), ]), (bold_t1_trans_wf, bold_surf_wf, [("outputnode.bold_t1", "inputnode.source_file")]), diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index 4839cd6a0..fec2c4682 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -1,25 +1,5 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -# -# Copyright 2022 The NiPreps Developers -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
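A minimal usage sketch of the ``CreateSignedDistanceVolume`` interface added above, before the resampling refactor that follows (the import path and file names are assumptions; ``wb_command`` must be installed):

    from nibabies.interfaces.workbench import CreateSignedDistanceVolume

    dist = CreateSignedDistanceVolume()
    dist.inputs.surf_file = "lh.white.surf.gii"  # input surface
    dist.inputs.ref_file = "T1w.nii.gz"          # defines the output grid
    result = dist.run()
    # result.outputs.out_file holds the per-voxel signed distance to the surface, in mm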
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# """ Resampling workflows ++++++++++++++++++++ @@ -29,23 +9,25 @@ .. autofunction:: init_bold_preproc_trans_wf """ -from ctypes import create_string_buffer -from ...config import DEFAULT_MEMORY_MIN_GB + +from os.path import basename + +import nipype.interfaces.workbench as wb from nipype import Function +from nipype.interfaces import freesurfer as fs +from nipype.interfaces import fsl +from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe -from nipype.interfaces import utility as niu, freesurfer as fs, fsl -import nipype.interfaces.workbench as wb from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms from niworkflows.interfaces.freesurfer import MedialNaNs -from ...interfaces.volume import CreateSignedDistanceVolume + +from ...config import DEFAULT_MEMORY_MIN_GB -def init_bold_surf_wf(mem_gb, - surface_spaces, - medial_surface_nan, - project_goodvoxels, - name="bold_surf_wf"): +def init_bold_surf_wf( + mem_gb, surface_spaces, medial_surface_nan, project_goodvoxels, name="bold_surf_wf" +): """ Sample functional images to FreeSurfer surfaces. @@ -83,9 +65,7 @@ def init_bold_surf_wf(mem_gb, Inputs ------ source_file - Motion-corrected BOLD series in T1 space - t1w_mask - Mask of the skull-stripped T1w image + Motion-corrected BOLD series in T1w space subjects_dir FreeSurfer SUBJECTS_DIR subject_id @@ -94,6 +74,8 @@ def init_bold_surf_wf(mem_gb, LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space anat_giftis GIFTI anatomical surfaces in T1w space + t1w_mask + Mask of the skull-stripped T1w image Outputs ------- @@ -118,7 +100,7 @@ def init_bold_surf_wf(mem_gb, if project_goodvoxels: workflow.__desc__ += """\ Before resampling, a "goodvoxels" mask [@hcppipelines] was applied, -excluding voxels whose time-series have a locally high coetfficient of +excluding voxels whose time-series have a locally high coefficient of variation, or that lie outside the cortical surfaces, from the surface projection. 
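Conceptually, the "goodvoxels" selection reduces to thresholding the temporal coefficient of variation within the cortical ribbon. A rough numpy sketch of the idea, not the FSL-based implementation wired up below (which also smooths and modulates the covariance map); the 0.5 factor and all names here are assumptions:

    import nibabel as nb
    import numpy as np


    def goodvoxels_mask(bold_file, ribbon_file, factor=0.5):
        """Keep ribbon voxels whose coefficient of variation is not an outlier."""
        bold = nb.load(bold_file).get_fdata()
        ribbon = nb.load(ribbon_file).get_fdata() > 0
        mean = bold.mean(axis=3)
        std = bold.std(axis=3)
        cov = np.divide(std, mean, out=np.zeros_like(std), where=mean != 0)
        cutoff = cov[ribbon].mean() + factor * cov[ribbon].std()
        return ribbon & (cov <= cutoff)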
""" @@ -130,7 +112,7 @@ def init_bold_surf_wf(mem_gb, "subject_id", "subjects_dir", "t1w2fsnative_xfm", - "anat_giftis", + "anat_ribbon", "t1w_mask", ] ), @@ -212,179 +194,82 @@ def select_target(subject_id, space): ) if project_goodvoxels: - workflow, combine_ribbon_vol_hemis = make_ribbon_file(workflow, mem_gb, inputnode) - workflow, goodvoxels_mask, ribbon_boldsrc_xfm = mask_outliers( - workflow, mem_gb, rename_src, combine_ribbon_vol_hemis - ) - workflow, apply_goodvoxels_ribbon_mask = project_vol_to_surf( - workflow, mem_gb, rename_src, goodvoxels_mask, ribbon_boldsrc_xfm - ) + goodvoxels_bold_mask_wf = init_goodvoxels_bold_mask_wf(mem_gb) - # If not applying goodvoxels, then get sampler.source_file from rename_src - # instead in connect_bold_surf_wf - apply_goodvx_or_rename = apply_goodvoxels_ribbon_mask + # fmt: off + workflow.connect([ + (inputnode, goodvoxels_bold_mask_wf, [("source_file", "inputnode.bold_file"), + ("anat_ribbon", "inputnode.anat_ribbon")]), + (goodvoxels_bold_mask_wf, rename_src, [("outputnode.masked_bold", "in_file")]), + ]) + # fmt: on else: - apply_goodvx_or_rename = rename_src + # fmt: off + workflow.connect([ + (inputnode, rename_src, [("source_file", "in_file")]), + ]) + # fmt: on # fmt: off - workflow = connect_bold_surf_wf( - workflow, inputnode, outputnode, get_fsnative, itersource, targets, - rename_src, sampler, itk2lta, update_metadata, apply_goodvx_or_rename + workflow.connect( + [ + ( + inputnode, + get_fsnative, + [("subject_id", "subject_id"), ("subjects_dir", "subjects_dir")], + ), + (inputnode, targets, [("subject_id", "subject_id")]), + (inputnode, itk2lta, [("source_file", "src_file"), ("t1w2fsnative_xfm", "in_file")]), + (get_fsnative, itk2lta, [("brain", "dst_file")]), # InfantFS: Use brain instead of T1 + (inputnode, sampler, [("subjects_dir", "subjects_dir"), ("subject_id", "subject_id")]), + (itersource, targets, [("target", "space")]), + (itersource, rename_src, [("target", "subject")]), + (rename_src, sampler, [("out_file", "source_file")]), + (itk2lta, sampler, [("out", "reg_file")]), + (targets, sampler, [("out", "target_subject")]), + (update_metadata, outputnode, [("out_file", "surfaces")]), + (itersource, outputnode, [("target", "target")]), + ] ) # fmt: on - return apply_med_surf_nans_if( - medial_surface_nan, workflow, inputnode, sampler, update_metadata, medial_nans - ) - - -def make_ribbon_file(workflow, mem_gb, inputnode): - """ - Parameters - ---------- - :param workflow: _type_, _description_ - :param mem_gb: _type_, _description_ - :param inputnode: _type_, _description_ - :return: _type_, _description_ - """ - # 0, 1 = wm; 2, 3 = pial; 6, 7 = mid - # note that order of lh / rh within each surf type is not guaranteed due to use - # of unsorted glob by FreeSurferSource prior, but we can do a sort - # to ensure consistent ordering - select_wm = pe.Node( - niu.Select(index=[0, 1]), - name="select_wm", - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) - - select_pial = pe.Node( - niu.Select(index=[2, 3]), - name="select_pial", - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) - - select_midthick = pe.Node( - niu.Select(index=[6, 7]), - name="select_midthick", - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) - - create_wm_distvol = pe.MapNode( - CreateSignedDistanceVolume(), - iterfield=["surf_file"], - name="create_wm_distvol", - mem_gb=mem_gb, - ) - - create_pial_distvol = pe.MapNode( - CreateSignedDistanceVolume(), - iterfield=["surf_file"], - name="create_pial_distvol", - 
mem_gb=mem_gb, - ) - - thresh_wm_distvol = pe.MapNode( - fsl.maths.MathsCommand(args="-thr 0 -bin -mul 255"), - iterfield=["in_file"], - name="thresh_wm_distvol", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - uthresh_pial_distvol = pe.MapNode( - fsl.maths.MathsCommand(args="-uthr 0 -abs -bin -mul 255"), - iterfield=["in_file"], - name="uthresh_pial_distvol", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - bin_wm_distvol = pe.MapNode( - fsl.maths.UnaryMaths(operation="bin"), - iterfield=["in_file"], - name="bin_wm_distvol", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - bin_pial_distvol = pe.MapNode( - fsl.maths.UnaryMaths(operation="bin"), - iterfield=["in_file"], - name="bin_pial_distvol", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - split_wm_distvol = pe.Node( - niu.Split(splits=[1, 1]), - name="split_wm_distvol", - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) - merge_wm_distvol_no_flatten = pe.Node( - niu.Merge(2), - no_flatten=True, - name="merge_wm_distvol_no_flatten", - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) + if medial_surface_nan: + # fmt: off + workflow.connect([ + (inputnode, medial_nans, [("subjects_dir", "subjects_dir")]), + (sampler, medial_nans, [("out_file", "in_file")]), + (medial_nans, update_metadata, [("out_file", "in_file")]), + ]) + # fmt: on + else: + # fmt: off + workflow.connect(sampler, "out_file", update_metadata, "in_file") + # fmt: on - make_ribbon_vol = pe.MapNode( - fsl.maths.MultiImageMaths(op_string="-mas %s -mul 255"), - iterfield=["in_file", "operand_files"], - name="make_ribbon_vol", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) + return workflow - bin_ribbon_vol = pe.MapNode( - fsl.maths.UnaryMaths(operation="bin"), - iterfield=["in_file"], - name="bin_ribbon_vol", - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - split_squeeze_ribbon_vol = pe.Node( - niu.Split(splits=[1, 1], squeeze=True), - name="split_squeeze_ribbon", - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) +def init_goodvoxels_bold_mask_wf(mem_gb, name="goodvoxels_bold_mask_wf"): + workflow = pe.Workflow(name=name) - combine_ribbon_vol_hemis = pe.Node( - fsl.maths.BinaryMaths(operation="add"), - name="combine_ribbon_vol_hemis", - mem_gb=DEFAULT_MEMORY_MIN_GB, + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "anat_ribbon", + "bold_file", + ] + ), + name="inputnode", ) - - # make HCP-style ribbon volume in T1w space - workflow.connect( - [ - (inputnode, select_wm, [("anat_giftis", "inlist")]), - (inputnode, select_pial, [("anat_giftis", "inlist")]), - (inputnode, select_midthick, [("anat_giftis", "inlist")]), - (select_wm, create_wm_distvol, [(("out", _sorted_by_basename), "surf_file")]), - (inputnode, create_wm_distvol, [("t1w_mask", "ref_file")]), - (select_pial, create_pial_distvol, [(("out", _sorted_by_basename), "surf_file")]), - (inputnode, create_pial_distvol, [("t1w_mask", "ref_file")]), - (create_wm_distvol, thresh_wm_distvol, [("out_file", "in_file")]), - (create_pial_distvol, uthresh_pial_distvol, [("out_file", "in_file")]), - (thresh_wm_distvol, bin_wm_distvol, [("out_file", "in_file")]), - (uthresh_pial_distvol, bin_pial_distvol, [("out_file", "in_file")]), - (bin_wm_distvol, split_wm_distvol, [("out_file", "inlist")]), - (split_wm_distvol, merge_wm_distvol_no_flatten, [("out1", "in1")]), - (split_wm_distvol, merge_wm_distvol_no_flatten, [("out2", "in2")]), - (bin_pial_distvol, make_ribbon_vol, [("out_file", "in_file")]), - (merge_wm_distvol_no_flatten, make_ribbon_vol, [("out", "operand_files")]), - (make_ribbon_vol, bin_ribbon_vol, [("out_file", 
"in_file")]), - (bin_ribbon_vol, split_squeeze_ribbon_vol, [("out_file", "inlist")]), - (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out1", "in_file")]), - (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out2", "operand_file")]), - ] + outputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "masked_bold", + "goodvoxels_ribbon", + ] + ), + name="outputnode", ) - - return workflow, combine_ribbon_vol_hemis - - -def mask_outliers(workflow, mem_gb, rename_src, combine_ribbon_vol_hemis): ribbon_boldsrc_xfm = pe.Node( ApplyTransforms(interpolation='MultiLabel', transforms='identity'), name="ribbon_boldsrc_xfm", @@ -540,9 +425,9 @@ def _calc_lower_thr(in_stats): # in bold timeseries, based on modulated normalized covariance workflow.connect( [ - (combine_ribbon_vol_hemis, ribbon_boldsrc_xfm, [("out_file", "input_image")]), - (rename_src, stdev_volume, [("out_file", "in_file")]), - (rename_src, mean_volume, [("out_file", "in_file")]), + (inputnode, ribbon_boldsrc_xfm, [("anat_ribbon", "input_image")]), + (inputnode, stdev_volume, [("bold_file", "in_file")]), + (inputnode, mean_volume, [("bold_file", "in_file")]), (mean_volume, ribbon_boldsrc_xfm, [("out_file", "reference_image")]), (stdev_volume, cov_volume, [("out_file", "in_file")]), (mean_volume, cov_volume, [("out_file", "operand_file")]), @@ -577,22 +462,6 @@ def _calc_lower_thr(in_stats): ] ) - return workflow, goodvoxels_mask, ribbon_boldsrc_xfm - - -def project_vol_to_surf(workflow, mem_gb, rename_src, goodvoxels_mask, ribbon_boldsrc_xfm): - """ - Parameters - ---------- - workflow : :obj: - mem_gb : :obj:`float` - Size of BOLD file in GB - rename_src: _type_, _description_ - goodvoxels_mask: _type_, _description_ - ribbon_boldsrc_xfm: _type_, _description_ - - :return: _type_, _description_ - """ goodvoxels_ribbon_mask = pe.Node( fsl.ApplyMask(), name_source=['in_file'], @@ -615,40 +484,13 @@ def project_vol_to_surf(workflow, mem_gb, rename_src, goodvoxels_mask, ribbon_bo (goodvoxels_mask, goodvoxels_ribbon_mask, [("out_file", "in_file")]), (ribbon_boldsrc_xfm, goodvoxels_ribbon_mask, [("output_image", "mask_file")]), (goodvoxels_ribbon_mask, apply_goodvoxels_ribbon_mask, [("out_file", "mask_file")]), - (rename_src, apply_goodvoxels_ribbon_mask, [("out_file", "in_file")]), + (inputnode, apply_goodvoxels_ribbon_mask, [("bold_file", "in_file")]), + (apply_goodvoxels_ribbon_mask, outputnode, [("out_file", "masked_bold")]), + (goodvoxels_ribbon_mask, outputnode, [("out_file", "goodvoxels_ribbon")]), ] ) - return workflow, apply_goodvoxels_ribbon_mask - - -def apply_med_surf_nans_if( - medial_surface_nan, workflow, inputnode, sampler, update_metadata, medial_nans -): - """ - Parameters - ---------- - medial_surface_nan : :obj:`bool` - Replace medial wall values with NaNs on functional GIFTI files - workflow : _type_, _description_ - inputnode : _type_, _description_ - sampler : _type_, _description_ - update_metadata : _type_, _description_ - medial_nans : _type_, _description_ - - :return: workflow, with medial surface NaNs applied if medial_surface_nan - """ - if medial_surface_nan: - # fmt: off - workflow.connect([ - (inputnode, medial_nans, [("subjects_dir", "subjects_dir")]), - (sampler, medial_nans, [("out_file", "in_file")]), - (medial_nans, update_metadata, [("out_file", "in_file")]), - ]) - # fmt: on - else: - workflow.connect(sampler, "out_file", update_metadata, "in_file") - return workflow + return workflow # , apply_goodvoxels_ribbon_mask def connect_bold_surf_wf( @@ -693,7 +535,6 @@ def 
init_bold_std_trans_wf( mem_gb, omp_nthreads, spaces, - multiecho, name="bold_std_trans_wf", use_compression=True, ): @@ -720,8 +561,8 @@ def init_bold_std_trans_wf( mem_gb=3, omp_nthreads=1, spaces=SpatialReferences( - spaces=["MNI152Lin", - ("MNIPediatricAsym", {"cohort": "6"})], + spaces=['MNI152Lin', + ('MNIPediatricAsym', {'cohort': '6'})], checkpoint=True), ) @@ -740,7 +581,7 @@ def init_bold_std_trans_wf( (e.g., ``MNI152Lin``, ``MNI152NLin6Asym``, ``MNIPediatricAsym``), nonstandard references (e.g., ``T1w`` or ``anat``, ``sbref``, ``run``, etc.), or a custom template located in the TemplateFlow root directory. Each ``Reference`` may also contain a spec, which is a - dictionary with template specifications (e.g., a specification of ``{"resolution": 2}`` + dictionary with template specifications (e.g., a specification of ``{'resolution': 2}`` would lead to resampling on a 2mm resolution of the space). name : :obj:`str` Name of workflow (default: ``bold_std_trans_wf``) @@ -762,8 +603,6 @@ def init_bold_std_trans_wf( Skull-stripping mask of reference image bold_split Individual 3D volumes, not motion corrected - t2star - Estimated T2\\* map in BOLD native space fieldwarp a :abbr:`DFM (displacements field map)` in ITK format hmc_xforms @@ -791,21 +630,18 @@ def init_bold_std_trans_wf( bold_aparc_std FreeSurfer's ``aparc+aseg.mgz`` atlas, in template space at the BOLD resolution (only if ``recon-all`` was run) - t2star_std - Estimated T2\\* map in template space template Template identifiers synchronized correspondingly to previously described outputs. """ - from fmriprep.interfaces.maths import Clip from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.func.util import init_bold_reference_wf from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms from niworkflows.interfaces.itk import MultiApplyTransforms - from niworkflows.interfaces.utility import KeySelect from niworkflows.interfaces.nibabel import GenerateSamplingReference from niworkflows.interfaces.nilearn import Merge + from niworkflows.interfaces.utility import KeySelect from niworkflows.utils.spaces import format_reference workflow = Workflow(name=name) @@ -838,7 +674,6 @@ def init_bold_std_trans_wf( "bold_aseg", "bold_mask", "bold_split", - "t2star", "fieldwarp", "hmc_xforms", "itk_bold_to_t1", @@ -849,9 +684,7 @@ def init_bold_std_trans_wf( name="inputnode", ) - iterablesource = pe.Node( - niu.IdentityInterface(fields=["std_target"]), name="iterablesource" - ) + iterablesource = pe.Node(niu.IdentityInterface(fields=["std_target"]), name="iterablesource") # Generate conversions for every template+spec at the input iterablesource.iterables = [("std_target", std_vol_references)] @@ -866,15 +699,11 @@ def init_bold_std_trans_wf( ) select_std = pe.Node( - KeySelect(fields=["anat2std_xfm"]), - name="select_std", - run_without_submitting=True, + KeySelect(fields=["anat2std_xfm"]), name="select_std", run_without_submitting=True ) select_tpl = pe.Node( - niu.Function(function=_select_template), - name="select_tpl", - run_without_submitting=True, + niu.Function(function=_select_template), name="select_tpl", run_without_submitting=True ) gen_ref = pe.Node( @@ -901,54 +730,48 @@ def init_bold_std_trans_wf( ) bold_to_std_transform = pe.Node( - MultiApplyTransforms( - interpolation="LanczosWindowedSinc", float=True, copy_dtype=True - ), + MultiApplyTransforms(interpolation="LanczosWindowedSinc", float=True, copy_dtype=True), name="bold_to_std_transform", mem_gb=mem_gb * 3 * 
omp_nthreads, n_procs=omp_nthreads, ) - # Interpolation can occasionally produce below-zero values as an artifact - threshold = pe.MapNode( - Clip(minimum=0), - name="threshold", - iterfield=['in_file'], - mem_gb=DEFAULT_MEMORY_MIN_GB) - - merge = pe.Node(Merge(compress=use_compression), name="merge", mem_gb=mem_gb * 3) + merge = pe.Node( + Merge(compress=use_compression), name="merge", mem_gb=mem_gb * 3 + ) # TODO: Lessen expensive restrictions # Generate a reference on the target standard space + # TODO: Replace with masking interface? gen_final_ref = init_bold_reference_wf(omp_nthreads=omp_nthreads, pre_mask=True) - # fmt:off + + # fmt: off workflow.connect([ - (iterablesource, split_target, [("std_target", "in_target")]), - (iterablesource, select_tpl, [("std_target", "template")]), - (inputnode, select_std, [("anat2std_xfm", "anat2std_xfm"), - ("templates", "keys")]), - (inputnode, mask_std_tfm, [("bold_mask", "input_image")]), - (inputnode, gen_ref, [(("bold_split", _first), "moving_image")]), - (inputnode, merge_xforms, [("hmc_xforms", "in4"), - ("fieldwarp", "in3"), - (("itk_bold_to_t1", _aslist), "in2")]), - (inputnode, merge, [("name_source", "header_source")]), - (inputnode, mask_merge_tfms, [(("itk_bold_to_t1", _aslist), "in2")]), - (inputnode, bold_to_std_transform, [("bold_split", "input_image")]), - (split_target, select_std, [("space", "key")]), - (select_std, merge_xforms, [("anat2std_xfm", "in1")]), - (select_std, mask_merge_tfms, [("anat2std_xfm", "in1")]), - (split_target, gen_ref, [(("spec", _is_native), "keep_native")]), - (select_tpl, gen_ref, [("out", "fixed_image")]), - (merge_xforms, bold_to_std_transform, [("out", "transforms")]), - (gen_ref, bold_to_std_transform, [("out_file", "reference_image")]), - (gen_ref, mask_std_tfm, [("out_file", "reference_image")]), - (mask_merge_tfms, mask_std_tfm, [("out", "transforms")]), - (mask_std_tfm, gen_final_ref, [("output_image", "inputnode.bold_mask")]), - (bold_to_std_transform, threshold, [("out_files", "in_file")]), - (threshold, merge, [("out_file", "in_files")]), - (merge, gen_final_ref, [("out_file", "inputnode.bold_file")]), + (iterablesource, split_target, [('std_target', 'in_target')]), + (iterablesource, select_tpl, [('std_target', 'template')]), + (inputnode, select_std, [('anat2std_xfm', 'anat2std_xfm'), + ('templates', 'keys')]), + (inputnode, mask_std_tfm, [('bold_mask', 'input_image')]), + (inputnode, gen_ref, [(('bold_split', _first), 'moving_image')]), + (inputnode, merge_xforms, [('hmc_xforms', 'in4'), + ('fieldwarp', 'in3'), + (('itk_bold_to_t1', _aslist), 'in2')]), + (inputnode, merge, [('name_source', 'header_source')]), + (inputnode, mask_merge_tfms, [(('itk_bold_to_t1', _aslist), 'in2')]), + (inputnode, bold_to_std_transform, [('bold_split', 'input_image')]), + (split_target, select_std, [('space', 'key')]), + (select_std, merge_xforms, [('anat2std_xfm', 'in1')]), + (select_std, mask_merge_tfms, [('anat2std_xfm', 'in1')]), + (split_target, gen_ref, [(('spec', _is_native), 'keep_native')]), + (select_tpl, gen_ref, [('out', 'fixed_image')]), + (merge_xforms, bold_to_std_transform, [('out', 'transforms')]), + (gen_ref, bold_to_std_transform, [('out_file', 'reference_image')]), + (gen_ref, mask_std_tfm, [('out_file', 'reference_image')]), + (mask_merge_tfms, mask_std_tfm, [('out', 'transforms')]), + (mask_std_tfm, gen_final_ref, [('output_image', 'inputnode.bold_mask')]), + (bold_to_std_transform, merge, [('out_files', 'in_files')]), + (merge, gen_final_ref, [('out_file', 'inputnode.bold_file')]), ]) - # 
fmt:on + # fmt: on output_names = [ "bold_mask_std", @@ -956,26 +779,20 @@ def init_bold_std_trans_wf( "bold_std_ref", "spatial_reference", "template", - ] - if freesurfer: - output_names.extend(["bold_aseg_std", "bold_aparc_std"]) - if multiecho: - output_names.append("t2star_std") + ] + freesurfer * ["bold_aseg_std", "bold_aparc_std"] - poutputnode = pe.Node( - niu.IdentityInterface(fields=output_names), name="poutputnode" - ) - # fmt:off + poutputnode = pe.Node(niu.IdentityInterface(fields=output_names), name="poutputnode") + # fmt: off workflow.connect([ # Connecting outputnode (iterablesource, poutputnode, [ - (("std_target", format_reference), "spatial_reference")]), - (merge, poutputnode, [("out_file", "bold_std")]), - (gen_final_ref, poutputnode, [("outputnode.ref_image", "bold_std_ref")]), - (mask_std_tfm, poutputnode, [("output_image", "bold_mask_std")]), - (select_std, poutputnode, [("key", "template")]), + (('std_target', format_reference), 'spatial_reference')]), + (merge, poutputnode, [('out_file', 'bold_std')]), + (gen_final_ref, poutputnode, [('outputnode.ref_image', 'bold_std_ref')]), + (mask_std_tfm, poutputnode, [('output_image', 'bold_mask_std')]), + (select_std, poutputnode, [('key', 'template')]), ]) - # fmt:on + # fmt: on if freesurfer: # Sample the parcellation files to functional space @@ -985,44 +802,29 @@ def init_bold_std_trans_wf( aparc_std_tfm = pe.Node( ApplyTransforms(interpolation="MultiLabel"), name="aparc_std_tfm", mem_gb=1 ) - # fmt:off - workflow.connect([ - (inputnode, aseg_std_tfm, [("bold_aseg", "input_image")]), - (inputnode, aparc_std_tfm, [("bold_aparc", "input_image")]), - (select_std, aseg_std_tfm, [("anat2std_xfm", "transforms")]), - (select_std, aparc_std_tfm, [("anat2std_xfm", "transforms")]), - (gen_ref, aseg_std_tfm, [("out_file", "reference_image")]), - (gen_ref, aparc_std_tfm, [("out_file", "reference_image")]), - (aseg_std_tfm, poutputnode, [("output_image", "bold_aseg_std")]), - (aparc_std_tfm, poutputnode, [("output_image", "bold_aparc_std")]), - ]) - # fmt:on - if multiecho: - t2star_std_tfm = pe.Node( - ApplyTransforms(interpolation="LanczosWindowedSinc", float=True), - name="t2star_std_tfm", mem_gb=1 - ) - # fmt:off + # fmt: off workflow.connect([ - (inputnode, t2star_std_tfm, [("t2star", "input_image")]), - (select_std, t2star_std_tfm, [("anat2std_xfm", "transforms")]), - (gen_ref, t2star_std_tfm, [("out_file", "reference_image")]), - (t2star_std_tfm, poutputnode, [("output_image", "t2star_std")]), + (inputnode, aseg_std_tfm, [('bold_aseg', 'input_image')]), + (inputnode, aparc_std_tfm, [('bold_aparc', 'input_image')]), + (select_std, aseg_std_tfm, [('anat2std_xfm', 'transforms')]), + (select_std, aparc_std_tfm, [('anat2std_xfm', 'transforms')]), + (gen_ref, aseg_std_tfm, [('out_file', 'reference_image')]), + (gen_ref, aparc_std_tfm, [('out_file', 'reference_image')]), + (aseg_std_tfm, poutputnode, [('output_image', 'bold_aseg_std')]), + (aparc_std_tfm, poutputnode, [('output_image', 'bold_aparc_std')]), ]) - # fmt:on + # fmt: on # Connect parametric outputs to a Join outputnode outputnode = pe.JoinNode( - niu.IdentityInterface(fields=output_names), - name="outputnode", - joinsource="iterablesource", + niu.IdentityInterface(fields=output_names), name="outputnode", joinsource="iterablesource" ) - # fmt:off + # fmt: off workflow.connect([ (poutputnode, outputnode, [(f, f) for f in output_names]), ]) - # fmt:on + # fmt: on return workflow @@ -1062,7 +864,7 @@ def init_bold_preproc_trans_wf( Include SDC warp in single-shot transform 
from BOLD to MNI interpolation : :obj:`str` Interpolation type to be used by ANTs' ``applyTransforms`` - (default ``"LanczosWindowedSinc"``) + (default ``'LanczosWindowedSinc'``) Inputs ------ @@ -1082,7 +884,6 @@ def init_bold_preproc_trans_wf( BOLD series, resampled in native space, including all preprocessing """ - from fmriprep.interfaces.maths import Clip from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.itk import MultiApplyTransforms from niworkflows.interfaces.nilearn import Merge @@ -1104,16 +905,11 @@ def init_bold_preproc_trans_wf( ) inputnode = pe.Node( - niu.IdentityInterface( - fields=["name_source", "bold_file", "hmc_xforms", "fieldwarp"] - ), + niu.IdentityInterface(fields=["name_source", "bold_file", "hmc_xforms", "fieldwarp"]), name="inputnode", ) - outputnode = pe.Node( - niu.IdentityInterface(fields=["bold", "bold_ref", "bold_ref_brain"]), - name="outputnode", - ) + outputnode = pe.Node(niu.IdentityInterface(fields=["bold"]), name="outputnode") merge_xforms = pe.Node( niu.Merge(2), @@ -1129,34 +925,25 @@ def init_bold_preproc_trans_wf( n_procs=omp_nthreads, ) - # Interpolation can occasionally produce below-zero values as an artifact - threshold = pe.MapNode( - Clip(minimum=0), - name="threshold", - iterfield=['in_file'], - mem_gb=DEFAULT_MEMORY_MIN_GB) - merge = pe.Node(Merge(compress=use_compression), name="merge", mem_gb=mem_gb * 3) - # fmt:off + # fmt: off workflow.connect([ - (inputnode, merge_xforms, [("fieldwarp", "in1"), - ("hmc_xforms", "in2")]), - (inputnode, bold_transform, [("bold_file", "input_image"), - (("bold_file", _first), "reference_image")]), - (inputnode, merge, [("name_source", "header_source")]), - (merge_xforms, bold_transform, [("out", "transforms")]), - (bold_transform, threshold, [("out_files", "in_file")]), - (threshold, merge, [("out_file", "in_files")]), - (merge, outputnode, [("out_file", "bold")]), + (inputnode, merge_xforms, [('fieldwarp', 'in1'), + ('hmc_xforms', 'in2')]), + (inputnode, bold_transform, [('bold_file', 'input_image'), + (('bold_file', _first), 'reference_image')]), + (inputnode, merge, [('name_source', 'header_source')]), + (merge_xforms, bold_transform, [('out', 'transforms')]), + (bold_transform, merge, [('out_files', 'in_files')]), + (merge, outputnode, [('out_file', 'bold')]), ]) - # fmt:on + # fmt: on + return workflow -def init_bold_grayords_wf( - grayord_density, mem_gb, repetition_time, name="bold_grayords_wf" -): +def init_bold_grayords_wf(grayord_density, mem_gb, repetition_time, name="bold_grayords_wf"): """ Sample Grayordinates files onto the fsLR atlas. @@ -1168,7 +955,7 @@ def init_bold_grayords_wf( :simple_form: yes from fmriprep.workflows.bold import init_bold_grayords_wf - wf = init_bold_grayords_wf(mem_gb=0.1, grayord_density="91k") + wf = init_bold_grayords_wf(mem_gb=0.1, grayord_density='91k') Parameters ---------- @@ -1177,18 +964,16 @@ def init_bold_grayords_wf( mem_gb : :obj:`float` Size of BOLD file in GB name : :obj:`str` - Unique name for the subworkflow (default: ``"bold_grayords_wf"``) + Unique name for the subworkflow (default: ``'bold_grayords_wf'``) Inputs ------ - bold_std : :obj:`str` - List of BOLD conversions to standard spaces. - spatial_reference :obj:`str` - List of unique identifiers corresponding to the BOLD standard-conversions. - subjects_dir : :obj:`str` - FreeSurfer's subjects directory. + subcortical_volume : :obj:`str` + The subcortical structures in MNI152NLin6Asym space. 
+ subcortical_labels : :obj:`str` + Volume file containing all subcortical labels surf_files : :obj:`str` - List of BOLD files resampled on the fsaverage (ico7) surfaces. + List of BOLD files resampled on the fsaverage (ico7) surfaces surf_refs : List of unique identifiers corresponding to the BOLD surface-conversions. @@ -1197,18 +982,19 @@ def init_bold_grayords_wf( cifti_bold : :obj:`str` List of BOLD grayordinates files - (L)eft and (R)ight. cifti_variant : :obj:`str` - Only ``"HCP Grayordinates"`` is currently supported. + Only ``'HCP Grayordinates'`` is currently supported. cifti_metadata : :obj:`str` Path of metadata files corresponding to ``cifti_bold``. cifti_density : :obj:`str` Density (i.e., either `91k` or `170k`) of ``cifti_bold``. """ - import templateflow.api as tf + import templateflow as tf from niworkflows.engine.workflows import LiterateWorkflow as Workflow - from niworkflows.interfaces.cifti import GenerateCifti from niworkflows.interfaces.utility import KeySelect + from ...interfaces.workbench import CiftiCreateDenseTimeseries + workflow = Workflow(name=name) workflow.__desc__ = """\ *Grayordinates* files [@hcppipelines] containing {density} samples were also @@ -1218,16 +1004,13 @@ def init_bold_grayords_wf( density=grayord_density ) - fslr_density, mni_density = ( - ("32k", "2") if grayord_density == "91k" else ("59k", "1") - ) + fslr_density = "32k" if grayord_density == "91k" else "59k" inputnode = pe.Node( niu.IdentityInterface( fields=[ - "bold_std", - "spatial_reference", - "subjects_dir", + "subcortical_volume", + "subcortical_labels", "surf_files", "surf_refs", ] @@ -1247,15 +1030,6 @@ def init_bold_grayords_wf( name="outputnode", ) - # extract out to BOLD base - select_std = pe.Node( - KeySelect(fields=["bold_std"]), - name="select_std", - run_without_submitting=True, - nohash=True, - ) - select_std.inputs.key = "MNI152NLin6Asym_res-%s" % mni_density - select_fs_surf = pe.Node( KeySelect(fields=["surf_files"]), name="select_fs_surf", @@ -1279,53 +1053,26 @@ def init_bold_grayords_wf( ], ) resample.inputs.current_sphere = [ - str( - tf.get( - "fsaverage", - hemi=hemi, - density="164k", - desc="std", - suffix="sphere", - extension=".surf.gii", - ) - ) + str(tf.api.get("fsaverage", hemi=hemi, density="164k", desc="std", suffix="sphere")) for hemi in "LR" ] + resample.inputs.current_area = [ str( - tf.get( - "fsaverage", - hemi=hemi, - density="164k", - desc="vaavg", - suffix="midthickness", - extension=".shape.gii", - ) + tf.api.get("fsaverage", hemi=hemi, density="164k", desc="vaavg", suffix="midthickness") ) for hemi in "LR" ] resample.inputs.new_sphere = [ str( - tf.get( - "fsLR", - space="fsaverage", - hemi=hemi, - density=fslr_density, - suffix="sphere", - extension=".surf.gii", - ) + tf.api.get("fsLR", space="fsaverage", hemi=hemi, density=fslr_density, suffix="sphere") ) for hemi in "LR" ] resample.inputs.new_area = [ str( - tf.get( - "fsLR", - hemi=hemi, - density=fslr_density, - desc="vaavg", - suffix="midthickness", - extension=".shape.gii", + tf.api.get( + "fsLR", hemi=hemi, density=fslr_density, desc="vaavg", suffix="midthickness" ) ) for hemi in "LR" @@ -1334,35 +1081,71 @@ def init_bold_grayords_wf( "space-fsLR_hemi-%s_den-%s_bold.gii" % (h, grayord_density) for h in "LR" ] - gen_cifti = pe.Node( - GenerateCifti( - volume_target="MNI152NLin6Asym", - surface_target="fsLR", - TR=repetition_time, - surface_density=fslr_density, - ), - name="gen_cifti", + split_surfaces = pe.Node( + niu.Function(function=_split_surfaces, output_names=["left_surface", 
"right_surface"]), + name="split_surfaces", + ) + gen_cifti = pe.Node(CiftiCreateDenseTimeseries(timestep=repetition_time), name="gen_cifti") + gen_cifti.inputs.volume_structure_labels = str( + tf.api.get("MNI152NLin6Asym", resolution=2, atlas="HCP", suffix="dseg") ) + gen_cifti_metadata = pe.Node( + niu.Function(function=_gen_metadata, output_names=["out_metadata", "variant", "density"]), + name="gen_cifti_metadata", + ) + gen_cifti_metadata.inputs.grayord_density = grayord_density - # fmt:off + # fmt: off workflow.connect([ - (inputnode, gen_cifti, [("subjects_dir", "subjects_dir")]), - (inputnode, select_std, [("bold_std", "bold_std"), - ("spatial_reference", "keys")]), - (inputnode, select_fs_surf, [("surf_files", "surf_files"), - ("surf_refs", "keys")]), - (select_fs_surf, resample, [("surf_files", "in_file")]), - (select_std, gen_cifti, [("bold_std", "bold_file")]), - (resample, gen_cifti, [("out_file", "surface_bolds")]), - (gen_cifti, outputnode, [("out_file", "cifti_bold"), - ("variant", "cifti_variant"), - ("out_metadata", "cifti_metadata"), - ("density", "cifti_density")]), + (inputnode, gen_cifti, [ + ('subcortical_volume', 'volume_data'), + ('subcortical_labels', 'volume_structure_labels')]), + (inputnode, select_fs_surf, [('surf_files', 'surf_files'), + ('surf_refs', 'keys')]), + (select_fs_surf, resample, [('surf_files', 'in_file')]), + (resample, split_surfaces, [('out_file', 'in_surfaces')]), + (split_surfaces, gen_cifti, [ + ('left_surface', 'left_metric'), + ('right_surface', 'right_metric')]), + (gen_cifti, outputnode, [('out_file', 'cifti_bold')]), + (gen_cifti_metadata, outputnode, [ + ('variant', 'cifti_variant'), + ('out_metadata', 'cifti_metadata'), + ('density', 'cifti_density')]), ]) - # fmt:on + # fmt: on return workflow +def _gen_metadata(grayord_density): + import json + from pathlib import Path + + space = "HCP grayordinates" + out_json = { + "grayordinates": grayord_density, + "space": space, + "surface": "fsLR", + "volume": "MNI152NLin6Asym", + "surface_density": "32k" if grayord_density == "91k" else "59k", + } + out_metadata = Path("dtseries_variant.json").absolute() + out_metadata.write_text(json.dumps(out_json, indent=2)) + return str(out_metadata), space, grayord_density + + +def _split_surfaces(in_surfaces): + """ + Split surfaces to differentiate left and right + + Returns + ------- + Left surface + Right surface + """ + return in_surfaces[0], in_surfaces[1] + + def _split_spec(in_target): space, spec = in_target template = space.split(":")[0] @@ -1379,6 +1162,10 @@ def _select_template(template): # Sanitize resolution res = specs.pop("res", None) or specs.pop("resolution", None) or "native" + # workaround for templates without res- identifier + if template in ("UNCInfant",): + res = None + if res != "native": specs["resolution"] = res return get_template_specs(template, template_spec=specs)[0] @@ -1401,7 +1188,9 @@ def _first(inlist): def _aslist(in_value): if isinstance(in_value, list): return in_value - return [in_value] + elif isinstance(in_value, str): + return [in_value] + return list(in_value) def _is_native(in_value): @@ -1409,16 +1198,12 @@ def _is_native(in_value): def _itk2lta(in_file, src_file, dst_file): - import nitransforms as nt from pathlib import Path + import nitransforms as nt + out_file = Path("out.lta").absolute() nt.linear.load( in_file, fmt="fs" if in_file.endswith(".lta") else "itk", reference=src_file ).to_filename(out_file, moving=dst_file, fmt="fs") - return str(out_file) - - -def _sorted_by_basename(inlist): - from 
os.path import basename - return sorted(inlist, key=lambda x: str(basename(x))) + return str(out_file) \ No newline at end of file
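For reference, the metadata sidecar written by ``_gen_metadata`` above would contain the following for the default ``91k`` density (reconstructed from the function body; the file is named ``dtseries_variant.json`` in the working directory):

    {
      "grayordinates": "91k",
      "space": "HCP grayordinates",
      "surface": "fsLR",
      "volume": "MNI152NLin6Asym",
      "surface_density": "32k"
    }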