def multigraphs(fibers, labels, outdir, gformat='gpickle'):
    """
    Creates a brain graph from fiber streamlines.

    Parameters
    ----------
    fibers : str
        Path to an .npz file containing fiber streamlines.
    labels : list of str
        Paths to parcellation (label) volumes; one graph per entry.
    outdir : str
        Base output directory; graphs land in <outdir>/graphs/<label>/.
    gformat : str
        Extension used for the saved graph files (default 'gpickle').
    """
    startTime = datetime.now()
    fiber_name = mgu.get_filename(fibers)
    base = fiber_name.split('_fibers', 1)[0]

    # Create output directories for graphs
    label_name = [mgu.get_filename(x) for x in labels]
    for label in label_name:
        p = Popen("mkdir -p " + outdir + "/graphs/" + label,
                  stdout=PIPE, stderr=PIPE, shell=True)
        # Wait for mkdir to finish; a fire-and-forget Popen can race with
        # save_graph() below and fail on a still-missing directory.
        p.communicate()

    # Create names of files to be produced
    graphs = [op.join(outdir, "graphs", x,
                      "{}_{}.{}".format(base, x, gformat))
              for x in label_name]
    print("Graphs of streamlines downsampled to given labels: " +
          ", ".join(graphs))

    # Load fibers; the archive is expected to hold a single array.
    print("Loading fibers...")
    fiber_npz = np.load(fibers)
    # NOTE(review): keys()[0] is Python-2-style indexing of keys(); assumes
    # a py2 runtime / list-returning keys() -- confirm before porting.
    tracks = fiber_npz[fiber_npz.keys()[0]]

    # Generate graphs from streamlines for each parcellation
    for idx, label in enumerate(label_name):
        print("Generating graph for " + label + " parcellation...")
        labels_im = nb.load(labels[idx])
        # Number of regions = unique label values minus the background (0).
        g1 = mgg(len(np.unique(labels_im.get_data())) - 1, labels[idx])
        g1.make_graph(tracks)
        g1.summary()
        g1.save_graph(graphs[idx])

    print("Execution took: " + str(datetime.now() - startTime))
    print("Complete!")
def multigraphs(fibers, labels, outdir):
    """
    Creates a brain graph from fiber streamlines.

    Parameters
    ----------
    fibers : str
        Path to an .npz file containing fiber streamlines.
    labels : list of str
        Paths to parcellation (label) volumes; one graph per entry.
    outdir : str
        Base output directory; graphs are written to
        <outdir>/graphs/<label>/<base>_<label>.graphml.
    """
    startTime = datetime.now()
    # Double splitext strips two-part extensions such as ".nii.gz".
    fiber_name = op.splitext(op.splitext(op.basename(fibers))[0])[0]
    base = fiber_name.split('_fibers', 1)[0]

    # Create output directories for graphs
    label_name = [op.splitext(op.splitext(op.basename(x))[0])[0]
                  for x in labels]
    for label in label_name:
        p = Popen("mkdir -p " + outdir + "/graphs/" + label,
                  stdout=PIPE, stderr=PIPE, shell=True)
        # Wait for mkdir; an unwaited Popen can race save_graph() below,
        # which would then write into a directory that does not exist yet.
        p.communicate()

    # Create names of files to be produced
    graphs = [outdir + "/graphs/" + x + '/' + base + "_" + x + ".graphml"
              for x in label_name]
    print("Graphs of streamlines downsampled to given labels: " +
          ", ".join(graphs))

    # Load fibers; the archive is expected to hold a single array.
    print("Loading fibers...")
    fiber_npz = np.load(fibers)
    # NOTE(review): keys()[0] assumes a list-returning keys() (Python 2);
    # confirm before porting to py3.
    tracks = fiber_npz[fiber_npz.keys()[0]]

    # Generate graphs from streamlines for each parcellation
    for idx, label in enumerate(label_name):
        print("Generating graph for " + label + " parcellation...")
        labels_im = nb.load(labels[idx])
        # Number of regions = unique label values minus the background (0).
        g1 = mgg(len(np.unique(labels_im.get_data())) - 1, labels[idx])
        g1.make_graph(tracks)
        g1.summary()
        g1.save_graph(graphs[idx])

    print("Execution took: " + str(datetime.now() - startTime))
    print("Complete!")
def ndmg_pipeline(dti, bvals, bvecs, mprage, atlas, mask, labels, outdir,
                  clean=False, fmt='gpickle'):
    """
    Creates a brain graph from MRI data.

    Parameters
    ----------
    dti : str
        Path to the DTI volume.
    bvals, bvecs : str
        Paths to the b-value and b-vector files.
    mprage : str
        Path to the structural (MPRAGE) volume.
    atlas : str
        Path to the atlas the DTI data is registered to.
    mask : str
        Path to the brain mask used during tractography.
    labels : str or list of str
        One or more parcellation (label) volumes; one graph per entry.
    outdir : str
        Base output directory for all derivatives.
    clean : bool
        If True, delete intermediate files on completion.
    fmt : str
        Graph output format, also used as the file extension
        (default 'gpickle').
    """
    startTime = datetime.now()

    # Create derivative output directories
    dti_name = mgu().get_filename(dti)
    cmd = "".join(["mkdir -p ", outdir, "/reg_dti ", outdir, "/tensors ",
                   outdir, "/fibers ", outdir, "/graphs"])
    mgu().execute_cmd(cmd)

    # Normalize a single parcellation to a one-element list: iterating a
    # bare string below would otherwise loop over its characters and index
    # labels[idx] per-character, breaking the single-parcellation path.
    if not isinstance(labels, list):
        labels = [labels]
    label_name = [mgu().get_filename(x) for x in labels]
    for label in label_name:
        # execute_cmd blocks until mkdir completes (unlike an unwaited
        # Popen), so the graph directories are guaranteed to exist when
        # save_graph() runs, and matches the directory creation style above.
        mgu().execute_cmd("mkdir -p " + outdir + "/graphs/" + label)

    # Create derivative output file names
    aligned_dti = "".join([outdir, "/reg_dti/", dti_name, "_aligned.nii.gz"])
    tensors = "".join([outdir, "/tensors/", dti_name, "_tensors.npz"])
    fibers = "".join([outdir, "/fibers/", dti_name, "_fibers.npz"])
    print("This pipeline will produce the following derivatives...")
    print("DTI volume registered to atlas: " + aligned_dti)
    print("Diffusion tensors in atlas space: " + tensors)
    print("Fiber streamlines in atlas space: " + fibers)

    # Again, graphs are different
    graphs = ["".join([outdir, "/graphs/", x, '/', dti_name, "_", x,
                       '.', fmt]) for x in label_name]
    print("Graphs of streamlines downsampled to given labels: " +
          ", ".join(graphs))

    # Creates gradient table from bvalues and bvectors
    print("Generating gradient table...")
    dti1 = "".join([outdir, "/tmp/", dti_name, "_t1.nii.gz"])
    bvecs1 = "".join([outdir, "/tmp/", dti_name, "_1.bvec"])
    mgp.rescale_bvec(bvecs, bvecs1)
    gtab = mgu().load_bval_bvec_dti(bvals, bvecs1, dti, dti1)

    # Align DTI volumes to Atlas
    print("Aligning volumes...")
    mgr().dti2atlas(dti1, gtab, mprage, atlas, aligned_dti, outdir, clean)

    print("Beginning tractography...")
    # Compute tensors and track fiber streamlines
    tens, tracks = mgt().eudx_basic(aligned_dti, mask, gtab, stop_val=0.2)

    # And save them to disk
    np.savez(tensors, tens)
    np.savez(fibers, tracks)

    # Generate graphs from streamlines for each parcellation
    for idx, label in enumerate(label_name):
        # (fixed: original message was missing the space before
        # "parcellation")
        print("Generating graph for " + label + " parcellation...")
        labels_im = nb.load(labels[idx])
        # Number of regions = unique label values minus the background (0).
        g1 = mgg(len(np.unique(labels_im.get_data())) - 1, labels[idx])
        g1.make_graph(tracks)
        g1.summary()
        g1.save_graph(graphs[idx], fmt=fmt)

    print("Execution took: " + str(datetime.now() - startTime))

    # Clean temp files
    if clean:
        print("Cleaning up intermediate files... ")
        cmd = "".join(['rm -f ', tensors, ' ', outdir, '/tmp/', dti_name,
                       '*', ' ', aligned_dti, ' ', fibers])
        mgu().execute_cmd(cmd)

    print("Complete!")
def ndmg_pipeline(dti, bvals, bvecs, mprage, atlas, mask, labels, outdir,
                  clean=False, fmt='gpickle'):
    """
    Creates a brain graph from MRI data.

    Parameters
    ----------
    dti : str
        Path to the DTI volume.
    bvals, bvecs : str
        Paths to the b-value and b-vector files.
    mprage : str
        Path to the structural (MPRAGE) volume.
    atlas : str
        Path to the atlas the DTI data is registered to.
    mask : str
        Path to the brain mask used during tractography.
    labels : list of str
        Parcellation (label) volumes; one graph is produced per entry.
    outdir : str
        Base output directory for all derivatives.
    clean : bool
        If True, delete intermediate files on completion.
    fmt : str
        Extension used in the graph file names (default 'gpickle').
    """
    startTime = datetime.now()

    # Create derivative output directories
    dti_name = op.splitext(op.splitext(op.basename(dti))[0])[0]
    cmd = "mkdir -p " + outdir + "/reg_dti " + outdir + "/tensors " +\
        outdir + "/fibers " + outdir + "/graphs"
    p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
    p.communicate()

    # Graphs are different because of multiple atlases
    label_name = [op.splitext(op.splitext(op.basename(x))[0])[0]
                  for x in labels]
    for label in label_name:
        p = Popen("mkdir -p " + outdir + "/graphs/" + label,
                  stdout=PIPE, stderr=PIPE, shell=True)
        # Wait for each mkdir (as the mkdir above already does); an
        # unwaited Popen can race save_graph() into a missing directory.
        p.communicate()

    # Create derivative output file names
    aligned_dti = outdir + "/reg_dti/" + dti_name + "_aligned.nii.gz"
    tensors = outdir + "/tensors/" + dti_name + "_tensors.npz"
    fibers = outdir + "/fibers/" + dti_name + "_fibers.npz"
    print("This pipeline will produce the following derivatives...")
    print("DTI volume registered to atlas: " + aligned_dti)
    print("Diffusion tensors in atlas space: " + tensors)
    print("Fiber streamlines in atlas space: " + fibers)

    # Again, graphs are different
    graphs = [outdir + "/graphs/" + x + '/' + dti_name + "_" + x + '.' + fmt
              for x in label_name]
    print("Graphs of streamlines downsampled to given labels: " +
          ", ".join(graphs))

    # Creates gradient table from bvalues and bvectors
    print("Generating gradient table...")
    dti1 = outdir + "/tmp/" + dti_name + "_t1.nii.gz"
    bvecs1 = outdir + "/tmp/" + dti_name + "_1.bvec"
    mgp.rescale_bvec(bvecs, bvecs1)
    gtab = mgu().load_bval_bvec_dti(bvals, bvecs1, dti, dti1)

    # Align DTI volumes to Atlas
    print("Aligning volumes...")
    mgr().dti2atlas(dti1, gtab, mprage, atlas, aligned_dti, outdir)

    print("Beginning tractography...")
    # Compute tensors and track fiber streamlines
    tens, tracks = mgt().eudx_basic(aligned_dti, mask, gtab, stop_val=0.2)

    # And save them to disk
    np.savez(tensors, tens)
    np.savez(fibers, tracks)

    # Generate graphs from streamlines for each parcellation
    for idx, label in enumerate(label_name):
        print("Generating graph for " + label + " parcellation...")
        labels_im = nb.load(labels[idx])
        # Number of regions = unique label values minus the background (0).
        g1 = mgg(len(np.unique(labels_im.get_data())) - 1, labels[idx])
        g1.make_graph(tracks)
        g1.summary()
        g1.save_graph(graphs[idx])

    print("Execution took: " + str(datetime.now() - startTime))

    # Clean temp files (fibers are intentionally kept in this variant)
    if clean:
        print("Cleaning up intermediate files... ")
        cmd = 'rm -f ' + tensors + ' ' + dti1 + ' ' + aligned_dti + ' ' +\
            bvecs1
        p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        p.communicate()

    print("Complete!")
def ndmg_dwi_pipeline(dwi, bvals, bvecs, mprage, atlas, mask, labels, outdir,
                      clean=False, fmt='edgelist'):
    """
    Creates a brain graph from MRI data.

    Parameters
    ----------
    dwi : str
        Path to the DWI volume.
    bvals, bvecs : str
        Paths to the b-value and b-vector files.
    mprage : str
        Path to the structural (MPRAGE) volume.
    atlas : str
        Path to the atlas the DWI data is registered to.
    mask : str
        Path to the brain mask used during tractography.
    labels : str or list of str
        One or more parcellation (label) volumes; one graph per entry.
    outdir : str
        Base output directory for all derivatives and QA figures.
    clean : bool
        If True, delete intermediate files on completion.
    fmt : str
        Graph output format, also used as the file extension
        (default 'edgelist').
    """
    startTime = datetime.now()

    # Create derivative output directories.
    # (fixed: original listed {}/qa/tensors twice; mkdir -p made it
    # harmless, but the duplicate was a typo)
    dwi_name = mgu.get_filename(dwi)
    cmd = "mkdir -p {}/reg/dwi {}/tensors {}/fibers {}/graphs \
{}/qa/tensors {}/qa/fibers {}/qa/reg/dwi"
    cmd = cmd.format(*([outdir] * 7))
    mgu.execute_cmd(cmd)

    # Graphs are different because of multiple parcellations.
    # Normalize a single parcellation to a one-element list: iterating a
    # bare string below would loop over its characters, and the original
    # single-label mkdir dropped its label from the format string, so the
    # graph subdirectory was never created.
    if not isinstance(labels, list):
        labels = [labels]
    label_name = [mgu.get_filename(x) for x in labels]
    for label in label_name:
        mgu.execute_cmd("mkdir -p {}/graphs/{}".format(outdir, label))

    # Create derivative output file names
    aligned_dwi = "{}/reg/dwi/{}_aligned.nii.gz".format(outdir, dwi_name)
    tensors = "{}/tensors/{}_tensors.npz".format(outdir, dwi_name)
    fibers = "{}/fibers/{}_fibers.npz".format(outdir, dwi_name)
    print("This pipeline will produce the following derivatives...")
    print("DWI volume registered to atlas: {}".format(aligned_dwi))
    print("Diffusion tensors in atlas space: {}".format(tensors))
    print("Fiber streamlines in atlas space: {}".format(fibers))

    # Again, graphs are different
    graphs = ["{}/graphs/{}/{}_{}.{}".format(outdir, x, dwi_name, x, fmt)
              for x in label_name]
    print("Graphs of streamlines downsampled to given labels: " +
          ", ".join(graphs))

    # Creates gradient table from bvalues and bvectors
    print("Generating gradient table...")
    dwi1 = "{}/tmp/{}_t1.nii.gz".format(outdir, dwi_name)
    bvecs1 = "{}/tmp/{}_1.bvec".format(outdir, dwi_name)
    mgp.rescale_bvec(bvecs, bvecs1)
    gtab = mgu.load_bval_bvec_dwi(bvals, bvecs1, dwi, dwi1)

    # Align DWI volumes to Atlas
    print("Aligning volumes...")
    mgr().dwi2atlas(dwi1, gtab, mprage, atlas, aligned_dwi, outdir, clean)
    # QA overlay uses the first b0 volume of the aligned data.
    loc0 = np.where(gtab.b0s_mask)[0][0]
    reg_mri_pngs(aligned_dwi, atlas, "{}/qa/reg/dwi/".format(outdir),
                 loc=loc0)

    print("Beginning tractography...")
    # Compute tensors and track fiber streamlines
    tens, tracks = mgt().eudx_basic(aligned_dwi, mask, gtab, stop_val=0.2)
    tensor2fa(tens, tensors, aligned_dwi, "{}/tensors/".format(outdir),
              "{}/qa/tensors/".format(outdir))

    # As we've only tested VTK plotting on MNI152 aligned data...
    if nb.load(mask).get_data().shape == (182, 218, 182):
        try:
            visualize_fibs(tracks, fibers, mask,
                           "{}/qa/fibers/".format(outdir), 0.02)
        except Exception:
            # Fiber QA is best-effort; a broken VTK install must not
            # abort the pipeline. (Narrowed from a bare except.)
            print("Fiber QA failed - VTK for Python not configured "
                  "properly.")

    # And save them to disk
    np.savez(tensors, tens)
    np.savez(fibers, tracks)

    # Generate graphs from streamlines for each parcellation
    for idx, label in enumerate(label_name):
        print("Generating graph for {} parcellation...".format(label))
        labels_im = nb.load(labels[idx])
        # Number of regions = unique label values minus the background (0).
        g1 = mgg(len(np.unique(labels_im.get_data())) - 1, labels[idx])
        g1.make_graph(tracks)
        g1.summary()
        g1.save_graph(graphs[idx], fmt=fmt)

    print("Execution took: {}".format(datetime.now() - startTime))

    # Clean temp files.
    # (fixed: original used a relative "tmp/{}*" although the temp files
    # were created under "{outdir}/tmp/", so they were never removed)
    if clean:
        print("Cleaning up intermediate files... ")
        cmd = 'rm -f {} {}/tmp/{}* {} {}'.format(tensors, outdir, dwi_name,
                                                 aligned_dwi, fibers)
        mgu.execute_cmd(cmd)

    print("Complete!")
def ndmg_pipeline(dti, bvals, bvecs, mprage, atlas, mask, labels, outdir,
                  clean=False, fmt='gpickle'):
    """
    Creates a brain graph from MRI data.

    Parameters
    ----------
    dti : str
        Path to the DTI volume.
    bvals, bvecs : str
        Paths to the b-value and b-vector files.
    mprage : str
        Path to the structural (MPRAGE) volume.
    atlas : str
        Path to the atlas the DTI data is registered to.
    mask : str
        Path to the brain mask used during tractography.
    labels : str or list of str
        One or more parcellation (label) volumes; one graph per entry.
    outdir : str
        Base output directory for all derivatives.
    clean : bool
        If True, delete intermediate files on completion.
    fmt : str
        Graph output format, also used as the file extension
        (default 'gpickle').
    """
    startTime = datetime.now()

    # Create derivative output directories
    dti_name = mgu().get_filename(dti)
    cmd = "".join(["mkdir -p ", outdir, "/reg_dti ", outdir, "/tensors ",
                   outdir, "/fibers ", outdir, "/graphs"])
    mgu().execute_cmd(cmd)

    # Normalize a single parcellation to a one-element list: iterating a
    # bare string below would otherwise loop over its characters and index
    # labels[idx] per-character, breaking the single-parcellation path.
    if not isinstance(labels, list):
        labels = [labels]
    label_name = [mgu().get_filename(x) for x in labels]
    for label in label_name:
        # execute_cmd blocks until mkdir completes (unlike an unwaited
        # Popen), so the graph directories exist before save_graph() runs.
        mgu().execute_cmd("mkdir -p " + outdir + "/graphs/" + label)

    # Create derivative output file names
    aligned_dti = "".join([outdir, "/reg_dti/", dti_name, "_aligned.nii.gz"])
    tensors = "".join([outdir, "/tensors/", dti_name, "_tensors.npz"])
    fibers = "".join([outdir, "/fibers/", dti_name, "_fibers.npz"])
    print("This pipeline will produce the following derivatives...")
    print("DTI volume registered to atlas: " + aligned_dti)
    print("Diffusion tensors in atlas space: " + tensors)
    print("Fiber streamlines in atlas space: " + fibers)

    # Again, graphs are different
    graphs = ["".join([outdir, "/graphs/", x, '/', dti_name, "_", x,
                       '.', fmt]) for x in label_name]
    print("Graphs of streamlines downsampled to given labels: " +
          ", ".join(graphs))

    # Creates gradient table from bvalues and bvectors
    print("Generating gradient table...")
    dti1 = "".join([outdir, "/tmp/", dti_name, "_t1.nii.gz"])
    bvecs1 = "".join([outdir, "/tmp/", dti_name, "_1.bvec"])
    mgp.rescale_bvec(bvecs, bvecs1)
    gtab = mgu().load_bval_bvec_dti(bvals, bvecs1, dti, dti1)

    # Align DTI volumes to Atlas
    print("Aligning volumes...")
    mgr().dti2atlas(dti1, gtab, mprage, atlas, aligned_dti, outdir, clean)

    print("Beginning tractography...")
    # Compute tensors and track fiber streamlines
    tens, tracks = mgt().eudx_basic(aligned_dti, mask, gtab, stop_val=0.2)

    # And save them to disk
    np.savez(tensors, tens)
    np.savez(fibers, tracks)

    # Generate graphs from streamlines for each parcellation
    for idx, label in enumerate(label_name):
        # (fixed: original message was missing the space before
        # "parcellation")
        print("Generating graph for " + label + " parcellation...")
        labels_im = nb.load(labels[idx])
        # Number of regions = unique label values minus the background (0).
        g1 = mgg(len(np.unique(labels_im.get_data())) - 1, labels[idx])
        g1.make_graph(tracks)
        g1.summary()
        g1.save_graph(graphs[idx], fmt=fmt)

    print("Execution took: " + str(datetime.now() - startTime))

    # Clean temp files
    if clean:
        print("Cleaning up intermediate files... ")
        cmd = "".join(['rm -f ', tensors, ' ', outdir, '/tmp/', dti_name,
                       '*', ' ', aligned_dti, ' ', fibers])
        mgu().execute_cmd(cmd)

    print("Complete!")