Standard TBSS implementation in Nipype
#! /usr/bin/env python
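"""Standard TBSS implementation in Nipype.

Runs the FSL Tract-Based Spatial Statistics pipeline as two Nipype workflows:
nonlinear registration of each subject's FA image to the FMRIB58 FA template,
followed by group skeletonisation and projection of the FA data onto the
skeleton.
"""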
import os
from os.path import join as pjoin
import argparse

import nipype.interfaces.fsl as fsl
import nipype.interfaces.io as nio
import nipype.interfaces.utility as util
import nipype.pipeline.engine as pe
import nibabel as nib
# Parse command line arguments
parser = argparse.ArgumentParser(description="Nipype script to run standard FSL-TBSS analysis.")
parser.add_argument("-subjects", nargs="*",
                    metavar="subject_id",
                    help="process subject(s)")
parser.add_argument("-workflows", nargs="*", metavar="<wf>",
                    default=[],
                    help="which workflows to run")
parser.add_argument("-inseries", action="store_true",
                    help="force running in series")
args = parser.parse_args()
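# Example invocation (the script filename and subject ids are placeholders;
# the flags come from the parser above):
#   python tbss_nipype.py -subjects subj01 subj02 -workflows all -inseries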
# Set up some paths
project_dir = "/mindhive/gablab/fluid/Analysis/Benchmark/TBSS/Nipype_Std"
data_dir = project_dir
analysis_dir = pjoin(project_dir, "results")
working_dir = pjoin(project_dir, "workingdir")
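# Note: these paths are site-specific.  The registration DataGrabber below
# expects the raw FA images to live directly in data_dir with names of the
# form <subject_id>_fa.nii.gz.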
#---------------------------------------------------------------------#
# Registration
#---------------------------------------------------------------------#

# Set up a node to supply subject ids
subjsource = pe.Node(util.IdentityInterface(fields=["subject_id"]),
                     iterables=("subject_id", args.subjects),
                     name="subjectsource")

# Grab the FA images
outfields = ["fa_image"]
reggrabber = pe.Node(nio.DataGrabber(infields=["subject_id"],
                                     outfields=outfields,
                                     base_directory=data_dir,
                                     template="%s_fa.nii.gz"),
                     name="reggrabber")
reggrabber.inputs.template_args = dict(fa_image=[["subject_id"]])
# Prep the FA images
prepfa = pe.Node(fsl.ImageMaths(suffix="_prep"), name="prepfa")

# Create a mask
getmask = pe.Node(fsl.ImageMaths(op_string="-bin",
                                 suffix="_mask"),
                  name="getmask")

# Get a reference to the warp target (FMRIB's FA target)
warp_target = fsl.Info.standard_image("FMRIB58_FA_1mm.nii.gz")

# Flirt the FA image to the target
flirt = pe.Node(fsl.FLIRT(reference=warp_target),
                name="flirt")

# Fnirt the FA image to the target
config_file = pjoin(os.environ["FSLDIR"], "etc/flirtsch/FA_2_FMRIB58_1mm.cnf")
fnirt = pe.Node(fsl.FNIRT(ref_file=warp_target,
                          config_file=config_file,
                          fieldcoeff_file=True),
                name="fnirt")
# Apply the warpfield to the masked FA image
warpfa = pe.Node(fsl.ApplyWarp(ref_file=warp_target),
                 name="warpfa")

# Set up the data sink node
regsink = pe.Node(nio.DataSink(base_directory=analysis_dir,
                               parameterization=False,
                               substitutions=[("_prep_warp", "")]),
                  name="regsink")

# Define the registration workflow
reg = pe.Workflow(name="registration", base_dir=working_dir)
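# Helper to build the fslmaths op string used to prep each FA image: cap the
# values at 1, erode by one voxel, and zero the outermost slice on every edge
# (the -roi keeps the interior starting at index 1 with size dim - 2 in each
# direction).  This mirrors the FA preprocessing done by FSL's tbss_1_preproc.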
def prep_op_string(infile):
    img = nib.load(infile)
    dimtup = tuple([d - 2 for d in img.shape])
    return "-min 1 -ero -roi 1 %d 1 %d 1 %d 0 1" % dimtup
# Connect up the registration workflow
reg.connect([
    (subjsource, reggrabber, [("subject_id", "subject_id")]),
    (reggrabber, prepfa, [("fa_image", "in_file")]),
    (reggrabber, prepfa, [(("fa_image", prep_op_string), "op_string")]),
    (prepfa, getmask, [("out_file", "in_file")]),
    (prepfa, flirt, [("out_file", "in_file")]),
    (getmask, flirt, [("out_file", "in_weight")]),
    (prepfa, fnirt, [("out_file", "in_file")]),
    (getmask, fnirt, [("out_file", "inmask_file")]),
    (flirt, fnirt, [("out_matrix_file", "affine_file")]),
    (prepfa, warpfa, [("out_file", "in_file")]),
    (fnirt, warpfa, [("fieldcoeff_file", "field_file")]),
    (subjsource, regsink, [("subject_id", "container")]),
    (warpfa, regsink, [("out_file", "@fa_warped")]),
])
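# Note that the skeletonise workflow below grabs its inputs from analysis_dir,
# i.e. from the files written by regsink, so the registration workflow must
# have completed for all subjects before the skeletonise workflow is run.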
#---------------------------------------------------------------------#
# Skeletonise
#---------------------------------------------------------------------#

# Define the skeleton thresh
# (it gets used a couple of times)
skeleton_thresh = 0.2

# Grab all of the normalized single-subject FA images in a sorted list
skelgrabber = pe.MapNode(nio.DataGrabber(infields=["subject_id"],
                                         outfields=["fa_images"],
                                         base_directory=analysis_dir,
                                         sort_filelist=True,
                                         template="%s/%s_fa.nii.gz"),
                         iterfield=["subject_id"],
                         name="skelgrabber")
skelgrabber.inputs.template_args = dict(fa_images=[["subject_id", "subject_id"]])
# This seems suboptimal but it appears not to cascade
skelgrabber.overwrite = True
skelgrabber.inputs.subject_id = args.subjects
# Merge the FA files into a 4D file
mergefa = pe.Node(fsl.Merge(dimension="t"), name="mergefa")

# Get a group mask
groupmask = pe.Node(fsl.ImageMaths(op_string="-max 0 -Tmin -bin",
                                   out_data_type="char",
                                   suffix="_mask"),
                    name="groupmask")

maskgroup = pe.Node(fsl.ImageMaths(op_string="-mas",
                                   suffix="_mask"),
                    name="maskgroup")

# Take the mean over the fourth dimension
meanfa = pe.Node(fsl.ImageMaths(op_string="-Tmean",
                                suffix="_mean"),
                 name="meanfa")

# Use the mean FA volume to generate a tract skeleton
makeskeleton = pe.Node(fsl.TractSkeleton(skeleton_file=True),
                       name="makeskeleton")

# Mask the skeleton at the threshold
skeletonmask = pe.Node(fsl.ImageMaths(op_string="-thr %.1f -bin" % skeleton_thresh,
                                      suffix="_mask"),
                       name="skeletonmask")

# Invert the brainmask then add in the tract skeleton
invertmask = pe.Node(fsl.ImageMaths(suffix="_inv",
                                    op_string="-mul -1 -add 1 -add"),
                     name="invertmask")

# Generate a distance map with the tract skeleton
distancemap = pe.Node(fsl.DistanceMap(),
                      name="distancemap")

# Project the FA values onto the skeleton
projectfa = pe.Node(fsl.TractSkeleton(threshold=skeleton_thresh,
                                      use_cingulum_mask=True,
                                      project_data=True),
                    name="projectfa")
# Define a datasink node for the skeleton workflow
skeletonsink = pe.Node(nio.DataSink(base_directory=pjoin(analysis_dir, "group"),
                                    parameterization=False,
                                    substitutions=[
                                        (args.subjects[0] + "_", ""),
                                        ("fa_merged_mask_mean_skeleton_mask", "skeleton_mask"),
                                        ("fa_merged_mask_mean_skeleton", "skeleton"),
                                        ("fa_merged_mask_mean", "mean_fa"),
                                        ("fa_merged_skeletonised", "skeletonised_fa")]),
                       name="skeletonsink")

# Define the skeleton workflow
skeletor = pe.Workflow(name="skeletonise", base_dir=working_dir)
# And connect it up
skeletor.connect([
    (skelgrabber, mergefa, [("fa_images", "in_files")]),
    (mergefa, groupmask, [("merged_file", "in_file")]),
    (mergefa, maskgroup, [("merged_file", "in_file")]),
    (groupmask, maskgroup, [("out_file", "in_file2")]),
    (maskgroup, meanfa, [("out_file", "in_file")]),
    (meanfa, makeskeleton, [("out_file", "in_file")]),
    (groupmask, invertmask, [("out_file", "in_file")]),
    (makeskeleton, skeletonmask, [("skeleton_file", "in_file")]),
    (skeletonmask, invertmask, [("out_file", "in_file2")]),
    (invertmask, distancemap, [("out_file", "in_file")]),
    (distancemap, projectfa, [("distance_map", "distance_map")]),
    (meanfa, projectfa, [("out_file", "in_file")]),
    (mergefa, projectfa, [("merged_file", "data_file")]),
    (meanfa, skeletonsink, [("out_file", "@mean_fa")]),
    (projectfa, skeletonsink, [("projected_data", "@projected_fa")]),
    (makeskeleton, skeletonsink, [("skeleton_file", "@skeleton")]),
    (skeletonmask, skeletonsink, [("out_file", "@skeleton_mask")]),
])
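# After this workflow runs, analysis_dir/group should contain mean_fa,
# skeleton, skeleton_mask, and skeletonised_fa images (names produced by the
# datasink substitutions above); skeletonised_fa holds every subject's FA
# values on the skeleton and is the file you would typically carry forward
# to voxelwise statistics.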
def workflow_runner(flow, stem):
    # Run the workflow if its name (or "all") was requested on the command line
    if any([a.startswith(stem) for a in args.workflows]) or args.workflows == ["all"]:
        # The "Linear" plugin runs nodes serially; otherwise use Nipype's default
        flow.run(plugin="Linear" if args.inseries else None)


if __name__ == "__main__":
    # Run some things
    workflow_runner(reg, "reg")
    workflow_runner(skeletor, "skel")