def get_data(bucket, remote_path, local, subj=None, public=True):
    """
    Given an s3 bucket, data location on the bucket, and a download
    location, crawls the bucket and recursively pulls all data.

    **Positional Arguments:**

            bucket:
                - name of the s3 bucket to pull from
            remote_path:
                - path on the bucket where the data lives
            local:
                - local directory the data is downloaded into
            subj:
                - optional subject id; restricts the pull to one subject
            public:
                - when True, access the bucket anonymously (unsigned)
    """
    client = boto3.client('s3')
    if not public:
        # Private buckets require credentials; make sure this one is
        # actually visible before shelling out to the CLI.
        available = [b['Name'] for b in client.list_buckets()['Buckets']]
        if bucket not in available:
            sys.exit("Error: could not locate bucket. Available buckets: " +
                     ", ".join(available))

    cmd = 'aws'
    if public:
        # Anonymous access needs --no-sign-request and an explicit region.
        cmd = cmd + ' --no-sign-request --region=us-east-1'
    cmd = cmd + ' s3 cp --recursive s3://' + bucket + '/' + remote_path

    if subj is not None:
        # Narrow the copy to a single subject and mirror the layout locally.
        cmd = cmd + '/sub-' + subj
        std, err = mgu().execute_cmd('mkdir -p ' + local + '/sub-' + subj)
        local = local + '/sub-' + subj

    std, err = mgu().execute_cmd(cmd + ' ' + local)
    return
def multigraphs(fibers, labels, outdir): """ Creates a brain graph from fiber streamlines """ startTime = datetime.now() fiber_name = mgu().get_filename(fibers) base = fiber_name.split('_fibers', 1)[0] # Create output directories for graphs label_name = [mgu().get_filename(x) for x in labels] for label in label_name: p = Popen("mkdir -p " + outdir + "/graphs/" + label, stdout=PIPE, stderr=PIPE, shell=True) # Create names of files to be produced graphs = [outdir + "/graphs/" + x + '/' + base + "_" + x + ".graphml" for x in label_name] print "Graphs of streamlines downsampled to given labels: " +\ (", ".join([x for x in graphs])) # Load fibers print "Loading fibers..." fiber_npz = np.load(fibers) tracks = fiber_npz[fiber_npz.keys()[0]] # Generate graphs from streamlines for each parcellation for idx, label in enumerate(label_name): print "Generating graph for " + label + " parcellation..." labels_im = nb.load(labels[idx]) g1 = mgg(len(np.unique(labels_im.get_data()))-1, labels[idx]) g1.make_graph(tracks) g1.summary() g1.save_graph(graphs[idx]) print "Execution took: " + str(datetime.now() - startTime) print "Complete!" pass
def dti2atlas(self, dti, gtab, mprage, atlas, aligned_dti, outdir): """ Aligns two images and stores the transform between them **Positional Arguments:** dti: - Input impage to be aligned as a nifti image file bvals: - File containing list of bvalues for each scan bvecs: - File containing gradient directions for each scan mprage: - Intermediate image being aligned to as a nifti image file atlas: - Terminal image being aligned to as a nifti image file aligned_dti: - Aligned output dti image as a nifti image file """ # Creates names for all intermediate files used # GK TODO: come up with smarter way to create these temp file names dti_name = op.splitext(op.splitext(op.basename(dti))[0])[0] mprage_name = op.splitext(op.splitext(op.basename(mprage))[0])[0] atlas_name = op.splitext(op.splitext(op.basename(atlas))[0])[0] dti2 = outdir + "/tmp/" + dti_name + "_t2.nii.gz" temp_aligned = outdir + "/tmp/" + dti_name + "_ta.nii.gz" b0 = outdir + "/tmp/" + dti_name + "_b0.nii.gz" xfm1 = outdir + "/tmp/" + dti_name + "_" + mprage_name + "_xfm.mat" xfm2 = outdir + "/tmp/" + mprage_name + "_" + atlas_name + "_xfm.mat" xfm3 = outdir + "/tmp/" + dti_name + "_" + atlas_name + "_xfm.mat" # Align DTI volumes to each other self.align_slices(dti, dti2, np.where(gtab.b0s_mask)[0]) # Loads DTI image in as data and extracts B0 volume import ndmg.utils as mgu dti_im = nb.load(dti2) b0_im = mgu().get_b0(gtab, dti_im.get_data()) # GK TODO: why doesn't top import work? 
# Wraps B0 volume in new nifti image b0_head = dti_im.get_header() b0_head.set_data_shape(b0_head.get_data_shape()[0:3]) b0_out = nb.Nifti1Image(b0_im, affine=dti_im.get_affine(), header=b0_head) b0_out.update_header() nb.save(b0_out, b0) # Algins B0 volume to MPRAGE, and MPRAGE to Atlas self.align(b0, mprage, xfm1) self.align(mprage, atlas, xfm2) # Combines transforms from previous registrations in proper order cmd = "convert_xfm -omat " + xfm3 + " -concat " + xfm2 + " " + xfm1 p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) p.communicate() # Applies combined transform to dti image volume self.applyxfm(dti2, atlas, xfm3, temp_aligned) self.resample(temp_aligned, aligned_dti, atlas) # Clean temp files cmd = "rm -f " + dti2 + " " + temp_aligned + " " + b0 + " " +\ xfm1 + " " + xfm2 + " " + xfm3 print "Cleaning temporary registration files..." p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) p.communicate()
def ndmg_pipeline(dti, bvals, bvecs, mprage, atlas, mask, labels, outdir, clean=False, fmt='gpickle'): """ Creates a brain graph from MRI data """ startTime = datetime.now() print fmt # Create derivative output directories dti_name = op.splitext(op.splitext(op.basename(dti))[0])[0] cmd = "mkdir -p " + outdir + "/reg_dti " + outdir + "/tensors " +\ outdir + "/fibers " + outdir + "/graphs" p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) p.communicate() # Graphs are different because of multiple atlases label_name = [op.splitext(op.splitext(op.basename(x))[0])[0] for x in labels] for label in label_name: p = Popen("mkdir -p " + outdir + "/graphs/" + label, stdout=PIPE, stderr=PIPE, shell=True) # Create derivative output file names aligned_dti = outdir + "/reg_dti/" + dti_name + "_aligned.nii.gz" tensors = outdir + "/tensors/" + dti_name + "_tensors.npz" fibers = outdir + "/fibers/" + dti_name + "_fibers.npz" print "This pipeline will produce the following derivatives..." print "DTI volume registered to atlas: " + aligned_dti print "Diffusion tensors in atlas space: " + tensors print "Fiber streamlines in atlas space: " + fibers # Again, graphs are different graphs = [outdir + "/graphs/" + x + '/' + dti_name + "_" + x + '.' + fmt for x in label_name] print "Graphs of streamlines downsampled to given labels: " +\ (", ".join([x for x in graphs])) # Creates gradient table from bvalues and bvectors print "Generating gradient table..." dti1 = outdir + "/tmp/" + dti_name + "_t1.nii.gz" bvecs1 = outdir + "/tmp/" + dti_name + "_1.bvec" mgp.rescale_bvec(bvecs, bvecs1) gtab = mgu().load_bval_bvec_dti(bvals, bvecs1, dti, dti1) # Align DTI volumes to Atlas print "Aligning volumes..." mgr().dti2atlas(dti1, gtab, mprage, atlas, aligned_dti, outdir) print "Beginning tractography..." 
# Compute tensors and track fiber streamlines tens, tracks = mgt().eudx_basic(aligned_dti, mask, gtab, stop_val=0.2) # And save them to disk np.savez(tensors, tens) np.savez(fibers, tracks) # Generate graphs from streamlines for each parcellation for idx, label in enumerate(label_name): print "Generating graph for " + label + " parcellation..." labels_im = nb.load(labels[idx]) g1 = mgg(len(np.unique(labels_im.get_data()))-1, labels[idx]) g1.make_graph(tracks) g1.summary() g1.save_graph(graphs[idx]) print "Execution took: " + str(datetime.now() - startTime) # Clean temp files if clean: print "Cleaning up intermediate files... " cmd = 'rm -f ' + tensors + ' ' + dti1 + ' ' + aligned_dti + ' ' +\ bvecs1 p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) p.communicate() print "Complete!" pass
def ndmg_pipeline(dti, bvals, bvecs, mprage, atlas, mask, labels, outdir,
                  clean=False, fmt='gpickle'):
    """
    Creates a brain graph from MRI data

    **Positional Arguments:**

            dti:
                - diffusion weighted image as a nifti file
            bvals / bvecs:
                - b-values and gradient directions for each scan
            mprage:
                - structural image used as an intermediate registration step
            atlas:
                - atlas everything is aligned to
            mask:
                - mask within which streamlines are tracked
            labels:
                - parcellation file, or list of them; one graph is produced
                  per parcellation
            outdir:
                - directory where all derivatives are written
            clean:
                - when True, remove intermediate files on completion
            fmt:
                - graph output format, forwarded to save_graph
    """
    startTime = datetime.now()

    # Create derivative output directories
    dti_name = mgu().get_filename(dti)
    cmd = "".join(["mkdir -p ", outdir, "/reg_dti ", outdir, "/tensors ",
                   outdir, "/fibers ", outdir, "/graphs"])
    mgu().execute_cmd(cmd)

    # Graphs are different because of multiple atlases.
    # BUG FIX: a single (string) labels argument previously produced a
    # string label_name, so the graphs comprehension, enumerate(), and
    # labels[idx] below all iterated character-by-character. Normalize
    # to a one-element list instead so both cases share one code path.
    if not isinstance(labels, list):
        labels = [labels]
    label_name = [mgu().get_filename(x) for x in labels]
    for label in label_name:
        p = Popen("mkdir -p " + outdir + "/graphs/" + label,
                  stdout=PIPE, stderr=PIPE, shell=True)
        # Wait so the directory exists before graphs are saved into it
        p.communicate()

    # Create derivative output file names
    aligned_dti = "".join([outdir, "/reg_dti/", dti_name, "_aligned.nii.gz"])
    tensors = "".join([outdir, "/tensors/", dti_name, "_tensors.npz"])
    fibers = "".join([outdir, "/fibers/", dti_name, "_fibers.npz"])
    print("This pipeline will produce the following derivatives...")
    print("DTI volume registered to atlas: " + aligned_dti)
    print("Diffusion tensors in atlas space: " + tensors)
    print("Fiber streamlines in atlas space: " + fibers)

    # Again, graphs are different
    graphs = ["".join([outdir, "/graphs/", x, '/', dti_name, "_", x,
                       '.', fmt]) for x in label_name]
    print("Graphs of streamlines downsampled to given labels: " +
          (", ".join([x for x in graphs])))

    # Creates gradient table from bvalues and bvectors
    # (normalized to print() for consistency with the rest of the function)
    print("Generating gradient table...")
    dti1 = "".join([outdir, "/tmp/", dti_name, "_t1.nii.gz"])
    bvecs1 = "".join([outdir, "/tmp/", dti_name, "_1.bvec"])
    mgp.rescale_bvec(bvecs, bvecs1)
    gtab = mgu().load_bval_bvec_dti(bvals, bvecs1, dti, dti1)

    # Align DTI volumes to Atlas
    print("Aligning volumes...")
    mgr().dti2atlas(dti1, gtab, mprage, atlas, aligned_dti, outdir, clean)

    print("Beginning tractography...")
    # Compute tensors and track fiber streamlines
    tens, tracks = mgt().eudx_basic(aligned_dti, mask, gtab, stop_val=0.2)

    # And save them to disk
    np.savez(tensors, tens)
    np.savez(fibers, tracks)

    # Generate graphs from streamlines for each parcellation
    for idx, label in enumerate(label_name):
        # BUG FIX: restored the missing space before "parcellation"
        print("Generating graph for " + label + " parcellation...")
        labels_im = nb.load(labels[idx])
        # Number of graph nodes = number of distinct labels, minus background
        g1 = mgg(len(np.unique(labels_im.get_data()))-1, labels[idx])
        g1.make_graph(tracks)
        g1.summary()
        g1.save_graph(graphs[idx], fmt=fmt)

    print("Execution took: " + str(datetime.now() - startTime))

    # Clean temp files
    if clean:
        print("Cleaning up intermediate files... ")
        cmd = "".join(['rm -f ', tensors, ' ', outdir, '/tmp/', dti_name,
                       '*', ' ', aligned_dti, ' ', fibers])
        mgu().execute_cmd(cmd)

    print("Complete!")