def main(args):
    print('args.directory: {}'.format(args.directory))
    print('args.channels: {}'.format(args.channels))
    directory = args.directory
    if args.datadir:
        directory = datadir_appender.datadir_appender(directory)
        print('datadir_appender made directory into: {}'.format(directory))

    if args.channels == 'rg':
        colors = ['red', 'green']
        print('Using red and green channels.')
    elif args.channels == 'r':
        colors = ['red']
        print('Using red channel.')
    elif args.channels == 'g':
        colors = ['green']
        print('Using green channel.')
    elif args.channels is None:
        colors = ['red', 'green']
        print('Using red and green channels.')

    for color in colors:
        print('loading brain from {}'.format(directory))
        brain = bbb.load_numpy_brain(os.path.join(directory, 'stitched_brain_{}.nii'.format(color)))

        # Bleaching correction (per voxel)
        brain = bbb.bleaching_correction(brain)

        # Z-score brain (per voxel, over time; see the sketch after this function)
        brain = bbb.z_score_brain(brain)

        zbrain_file = os.path.join(os.path.split(directory)[0], 'brain_zscored_{}.nii'.format(color))
        bbb.save_brain(zbrain_file, brain)

    ################################
    ### Create bleaching figures ###
    ################################
    os.system("sbatch bleaching_qc.sh {}".format(os.path.split(directory)[0]))

    ###################
    ### Perform PCA ###
    ###################
    jobid = subprocess.check_output('sbatch pca.sh {}'.format(os.path.split(directory)[0]), shell=True)

    # Get the job id so we can use it as a dependency
    jobid_str = jobid.decode('utf-8')
    jobid_str = [x for x in jobid_str.split() if x.isdigit()][0]
    print('jobid: {}'.format(jobid_str))
    job_ids = [jobid_str]

    # Create the colon-separated job string slurm wants
    job_ids_colons = ':'.join(job_ids)
    print('Colons: {}'.format(job_ids_colons))

    ########################################
    ### Once PCA done, perform quick GLM ###
    ########################################
    os.system("sbatch --dependency=afterany:{} quick_glm.sh {}".format(job_ids_colons, os.path.split(directory)[0]))
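# The bbb.bleaching_correction and bbb.z_score_brain helpers above belong to the
# project's bbb module and are not shown in this snippet. As a point of reference,
# a minimal sketch of per-voxel z-scoring over time on an (x, y, z, t) array could
# look like this; it is an assumption about the behavior, not the project's
# implementation.
import numpy as np

def z_score_over_time(brain, eps=1e-8):
    """Z-score each voxel's time series: subtract its mean, divide by its std."""
    mean = brain.mean(axis=-1, keepdims=True)
    std = brain.std(axis=-1, keepdims=True)
    return (brain - mean) / (std + eps)

# Example on a tiny random volume series:
# demo = np.random.rand(4, 4, 2, 100)
# z = z_score_over_time(demo)   # same shape; each voxel ~zero mean, ~unit std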
def main(directory):
    print_mem()
    brain_path = os.path.join(directory, 'brain_zscored_green.nii')
    brain = bbb.load_numpy_brain(brain_path)
    dims = bbb.get_dims(brain)
    print('brain shape is x,y,z,t {}'.format(brain.shape))
    print_mem()

    # Reshape to time x voxels
    t0 = time()
    X = brain.reshape(-1, brain.shape[-1]).T
    print('X is time by voxels {}'.format(X.shape))
    print('brain shape is {}'.format(brain.shape))
    print('Reshape duration: {}'.format(time() - t0))
    print_mem()

    # Fit incremental PCA (batched, to keep memory bounded)
    t0 = time()
    num_components = X.shape[0]
    pca = IncrementalPCA(n_components=num_components, batch_size=100000).fit(X)
    print_mem()

    pca_scores = pca.components_
    print('Scores is PC by voxel {}'.format(pca_scores.shape))
    pca_loadings = pca.transform(X)
    print('Loadings is time by PC {}'.format(pca_loadings.shape))
    pca_spatial = np.reshape(pca_scores, (-1, dims['y'], dims['x'], dims['z']))
    print('Spatial is {}'.format(pca_spatial.shape))
    print('PCA duration: {}'.format(time() - t0))

    pca_directory = os.path.join(directory, 'pca')
    if not os.path.exists(pca_directory):
        os.mkdir(pca_directory)

    save_file = os.path.join(pca_directory, 'scores_(spatial).npy')
    np.save(save_file, pca_spatial)
    save_file = os.path.join(pca_directory, 'loadings_(temporal).npy')
    np.save(save_file, pca_loadings)
    print('Saved PCA!')
    print_mem()
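# For orientation: with scikit-learn's (Incremental)PCA, `components_` has shape
# (n_components, n_voxels) and `transform(X)` has shape (n_timepoints, n_components),
# so the data can be approximately reconstructed as loadings @ components_ + mean_.
# A minimal, self-contained sketch on toy data (not the pipeline's brain arrays):
import numpy as np
from sklearn.decomposition import IncrementalPCA

X_toy = np.random.rand(50, 200)                  # 50 "timepoints" x 200 "voxels"
ipca = IncrementalPCA(n_components=10, batch_size=25).fit(X_toy)
loadings = ipca.transform(X_toy)                 # (50, 10)
scores = ipca.components_                        # (10, 200)
X_hat = loadings @ scores + ipca.mean_           # rank-10 approximation of X_toy
print('reconstruction error:', np.linalg.norm(X_toy - X_hat))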
def main(args):
    directory = args[0]
    motcorr_directory = args[1]
    master_path = args[2]
    slave_path = args[3]
    master_path_mean = args[4]
    vol_start = int(args[5])
    vol_end = int(args[6])

    # For the sake of memory, load only the part of the brain we will need
    # (a sketch of such a partial loader follows this function).
    master_brain = load_partial_brain(master_path, vol_start, vol_end)
    slave_brain = load_partial_brain(slave_path, vol_start, vol_end)
    mean_brain = ants.from_numpy(bbb.load_numpy_brain(master_path_mean))

    bbb.motion_correction(master_brain,
                          slave_brain,
                          directory,
                          motcorr_directory,
                          meanbrain=mean_brain,
                          suffix='_' + str(vol_start))
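# load_partial_brain is referenced above but not defined in this snippet. A plausible
# sketch, assuming nibabel is available, the file is a 4D .nii, and only volumes
# [vol_start, vol_end) are wanted as an ANTs image; the real helper may differ.
import nibabel as nib
import numpy as np
import ants

def load_partial_brain_sketch(path, vol_start, vol_end):
    proxy = nib.load(path)
    # Slicing the dataobj proxy avoids loading the full 4D array into memory.
    partial = np.asarray(proxy.dataobj[:, :, :, vol_start:vol_end], dtype='float32')
    return ants.from_numpy(partial)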
def main(args):
    '''
    Assumes the imaging folder contains these files:
        functional_channel_1.nii, serving as the red master
        functional_channel_2.nii, serving as the green slave
    '''
    path = args[0]

    ### Create mean brain
    imaging_path = os.path.join(path, 'imaging')
    master_brain_path = os.path.join(imaging_path, 'functional_channel_1.nii')
    slave_brain_path = os.path.join(imaging_path, 'functional_channel_2.nii')
    print('Using master brain {}'.format(master_brain_path))
    master_brain = bbb.load_numpy_brain(master_brain_path)
    master_brain_mean = bbb.make_meanbrain(master_brain)
    master_brain_mean_file = os.path.join(imaging_path, 'functional_channel_1_mean.nii')
    bbb.save_brain(master_brain_mean_file, master_brain_mean)
    print('Saved mean brain {}'.format(master_brain_mean_file))

    # How many volumes?
    num_vols = np.shape(master_brain)[-1]

    # Clear memory
    master_brain = None
    master_brain_mean = None
    time.sleep(5)

    ### Make subfolder if it doesn't exist
    subfolder = 'motcorr'
    motcorr_directory = os.path.join(path, subfolder)
    if not os.path.exists(motcorr_directory):
        os.makedirs(motcorr_directory)

    ### Start fleet of motcorr_partial.sh jobs, giving each the correct portion of data
    # num_vols = 5  # can set this to test
    step = 100  # can reduce this for testing
    job_ids = []
    for i in range(0, num_vols, step):
        vol_start = i
        vol_end = min(i + step, num_vols)  # handle last section

        ### SUBMIT JOB ###
        jobid = subprocess.check_output(
            'sbatch motcorr_partial.sh {} {} {} {} {} {} {}'.format(
                path, motcorr_directory, master_brain_path, slave_brain_path,
                master_brain_mean_file, vol_start, vol_end),
            shell=True)

        # Get job ids so we can use them as dependencies
        jobid_str = jobid.decode('utf-8')
        jobid_str = [x for x in jobid_str.split() if x.isdigit()][0]
        print('jobid: {}'.format(jobid_str))
        job_ids.append(jobid_str)

    ### Start motcorr_stitcher.sh with a dependency on all jobs above finishing ###
    # Create the colon-separated job string slurm wants
    job_ids_colons = ':'.join(job_ids)
    print('Colons: {}'.format(job_ids_colons))
    os.system('sbatch --dependency=afterany:{} motcorr_stitcher.sh {}'.format(
        job_ids_colons, motcorr_directory))
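# The loop above splits num_vols volumes into windows of `step`; for example, with
# num_vols = 250 and step = 100 the submitted (vol_start, vol_end) pairs are
# (0, 100), (100, 200), (200, 250). A quick way to preview the windows without
# submitting anything:
#
#   num_vols, step = 250, 100
#   windows = [(i, min(i + step, num_vols)) for i in range(0, num_vols, step)]
#   print(windows)   # [(0, 100), (100, 200), (200, 250)]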
def main(args):
    print('Stitcher started.')
    directory = args.directory
    print('directory: {}'.format(directory))
    if args.datadir:
        directory = datadir_appender.datadir_appender(directory)

    # directory will contain motcorr_green_x.nii and motcorr_red_x.nii
    # Get list of reds and greens
    reds = []
    greens = []
    for item in os.listdir(directory):
        # Sanity check that it is a .nii file
        if '.nii' in item:
            if 'red' in item:
                reds.append(item)
            elif 'green' in item:
                greens.append(item)

    # Need to order correctly for correct stitching (natural sort; see the sketch after this function)
    bbb.sort_nicely(greens)
    bbb.sort_nicely(reds)

    # Add directory path
    reds = [os.path.join(directory, x) for x in reds]
    greens = [os.path.join(directory, x) for x in greens]

    if args.channels == 'rg':
        colors = ['red', 'green']
        channels = [reds, greens]
        print('Using red and green channels.')
    elif args.channels == 'r':
        colors = ['red']
        channels = [reds]
        print('Using red channel.')
    elif args.channels == 'g':
        colors = ['green']
        channels = [greens]
        print('Using green channel.')
    elif args.channels is None:
        colors = ['red', 'green']
        channels = [reds, greens]
        print('Using red and green channels.')

    ### Load and stitch brains ###
    # Which channels are processed is based on the argparse input above
    for i, channel in enumerate(channels):
        brains = []
        for brain_file in channel:
            brain = bbb.load_numpy_brain(brain_file)

            # Handle edge case of a single-volume brain
            if len(np.shape(brain)) == 3:
                brain = brain[:, :, :, np.newaxis]

            print('shape of partial brain: {}'.format(np.shape(brain)))
            brains.append(brain)

        print('brains len: {}'.format(len(brains)))
        stitched_brain = np.concatenate(brains, axis=-1)
        print('stitched_brain shape: {}'.format(np.shape(stitched_brain)))
        save_file = os.path.join(directory, 'stitched_brain_{}.nii'.format(colors[i]))
        bbb.save_brain(save_file, stitched_brain)
        stitched_brain = None

        # Delete partial brains
        [os.remove(file) for file in channel]

    ### Stitch motcorr params and create motcorr graph ###
    # Get motcorr param files
    motcorr_param_files = []
    for item in os.listdir(directory):
        if '.npy' in item:
            file = os.path.join(directory, item)
            motcorr_param_files.append(file)
    bbb.sort_nicely(motcorr_param_files)

    # Load motcorr param files (needed to sort first)
    motcorr_params = []
    for file in motcorr_param_files:
        motcorr_params.append(np.load(file))

    if len(motcorr_params) > 0:
        stitched_params = np.concatenate(motcorr_params, axis=0)
        save_file = os.path.join(directory, 'motcorr_params_stitched')
        np.save(save_file, stitched_params)
        [os.remove(file) for file in motcorr_param_files]

        xml_dir = os.path.join(os.path.split(directory)[0], 'imaging')
        print('directory: {}'.format(directory))
        print('xml_dir: {}'.format(xml_dir))
        sys.stdout.flush()
        bbb.save_motion_figure(stitched_params, xml_dir, directory)
    else:
        print('Empty motcorr params - skipping saving moco figure.')

    ### START Z-SCORING ###
    os.system("sbatch zscore.sh {}".format(directory))
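# bbb.sort_nicely above performs a natural (human) sort so that e.g. motcorr_green_2.nii
# comes before motcorr_green_10.nii; a plain lexicographic sort would order the chunks
# incorrectly. A minimal sketch of such an in-place natural sort (an assumption about
# the helper's behavior, not its actual implementation):
import re

def sort_nicely_sketch(items):
    def alphanum_key(s):
        # Split into digit and non-digit runs so numeric chunks compare as integers.
        return [int(tok) if tok.isdigit() else tok.lower() for tok in re.split(r'(\d+)', s)]
    items.sort(key=alphanum_key)

# files = ['motcorr_green_10.nii', 'motcorr_green_2.nii', 'motcorr_green_1.nii']
# sort_nicely_sketch(files)
# files is now ['motcorr_green_1.nii', 'motcorr_green_2.nii', 'motcorr_green_10.nii']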