def main(args):
    print('args.directory: {}'.format(args.directory))
    print('args.channels: {}'.format(args.channels))

    directory = args.directory
    if args.datadir:
        directory = datadir_appender.datadir_appender(directory)
        print('datadir_appender made directory into: {}'.format(directory))

    if args.channels == 'rg':
        colors = ['red', 'green']
        print('Using red and green channels.')
    elif args.channels == 'r':
        colors = ['red']
        print('Using red channel.')
    elif args.channels == 'g':
        colors = ['green']
        print('Using green channel.')
    elif args.channels is None:
        colors = ['red', 'green']
        print('Using red and green channels.')

    for color in colors:
        print('loading brain from {}'.format(directory))
        brain = bbb.load_numpy_brain(os.path.join(directory, 'stitched_brain_{}.nii'.format(color)))

        # Bleaching correction (per voxel)
        brain = bbb.bleaching_correction(brain)

        # Z-score brain
        brain = bbb.z_score_brain(brain)
        zbrain_file = os.path.join(os.path.split(directory)[0], 'brain_zscored_{}.nii'.format(color))
        bbb.save_brain(zbrain_file, brain)

    ################################
    ### Create bleaching figures ###
    ################################
    os.system("sbatch bleaching_qc.sh {}".format(os.path.split(directory)[0]))

    ###################
    ### Perform PCA ###
    ###################
    jobid = subprocess.check_output('sbatch pca.sh {}'.format(os.path.split(directory)[0]), shell=True)

    # Get job ids so we can use them as dependencies
    jobid_str = jobid.decode('utf-8')
    jobid_str = [x for x in jobid_str.split() if x.isdigit()][0]
    print('jobid: {}'.format(jobid_str))
    job_ids = []
    job_ids.append(jobid_str)

    # Create weird job string slurm wants
    job_ids_colons = ':'.join(job_ids)
    print('Colons: {}'.format(job_ids_colons))

    ########################################
    ### Once PCA done, perform quick GLM ###
    ########################################
    os.system("sbatch --dependency=afterany:{} quick_glm.sh {}".format(job_ids_colons, os.path.split(directory)[0]))
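# Hedged sketch (not part of the original script): a minimal argparse
# entrypoint matching the attributes main() reads above (args.directory,
# args.channels, args.datadir). The flag names, choices, and help text are
# assumptions for illustration only.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--directory', required=True,
                        help='folder containing stitched_brain_<color>.nii files')
    parser.add_argument('--channels', choices=['rg', 'r', 'g'], default=None,
                        help='which channels to process; both if omitted')
    parser.add_argument('--datadir', action='store_true',
                        help='expand directory via datadir_appender')
    main(parser.parse_args())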
def main(directory):
    ### Load PCA
    save_file = os.path.join(directory, 'pca', 'scores_(spatial).npy')
    pca_spatial = np.load(save_file)
    save_file = os.path.join(directory, 'pca', 'loadings_(temporal).npy')
    pca_loadings = np.load(save_file)
    print('pca_loadings_shape: {}'.format(pca_loadings.shape))

    ### Load timestamps
    timestamps = bbb.load_timestamps(os.path.join(directory, 'imaging'))

    ### Load fictrac
    fictrac_raw = bbb.load_fictrac(os.path.join(directory, 'fictrac'))
    fictrac = bbb.smooth_and_interp_fictrac(fictrac_raw,
                                            fps=50,
                                            resolution=10,
                                            expt_len=1000 * 30 * 60,
                                            behavior='dRotLabY',
                                            timestamps=timestamps)

    ### Fit model
    num_pcs = 100
    Y_glm = fictrac
    X_glm = pca_loadings[:, :num_pcs]
    model = LassoCV().fit(X_glm, Y_glm)
    score = model.score(X_glm, Y_glm)

    brain_map = np.tensordot(model.coef_, pca_spatial[:num_pcs, :, :, :], axes=1)

    pca_glm_directory = os.path.join(directory, 'pca_glm')
    if not os.path.exists(pca_glm_directory):
        os.mkdir(pca_glm_directory)
    save_file = os.path.join(pca_glm_directory, 'forward.nii')
    bbb.save_brain(save_file, brain_map)
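# Hedged sketch (not part of the pipeline): a toy shape check for the
# tensordot projection above, using random arrays in place of the real PCA
# outputs. The shapes are assumptions chosen only to mirror the indexing in
# main(): temporal loadings are (timepoints, PCs) and spatial scores are
# (PCs, x, y, z).
import numpy as np

num_pcs, x, y, z = 100, 8, 8, 4
coef = np.random.rand(num_pcs)               # stands in for model.coef_
spatial = np.random.rand(num_pcs, x, y, z)   # stands in for pca_spatial[:num_pcs]

# Contract over the PC axis: (num_pcs,) . (num_pcs, x, y, z) -> (x, y, z)
brain_map = np.tensordot(coef, spatial, axes=1)
assert brain_map.shape == (x, y, z)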
def main(args):
    '''
    Let's write this assuming there are files:
        functional_channel_1.nii, serving as red master
        functional_channel_2.nii, serving as green slave
    '''
    # logfile = args['logfile']
    # flagged_dir = args['flagged_dir']
    # target_path = args['dataset_path']
    # printlog = getattr(flow.Printlog(logfile=logfile), 'print_to_log')
    # printlog('\nBuilding fly from directory {}'.format(flagged_dir))

    path = args[0]

    ### Create mean brain
    imaging_path = os.path.join(path, 'imaging')
    master_brain_path = os.path.join(imaging_path, 'functional_channel_1.nii')
    slave_brain_path = os.path.join(imaging_path, 'functional_channel_2.nii')
    print('Using master brain {}'.format(master_brain_path))

    master_brain = bbb.load_numpy_brain(master_brain_path)
    master_brain_mean = bbb.make_meanbrain(master_brain)
    master_brain_mean_file = os.path.join(imaging_path, 'functional_channel_1_mean.nii')
    bbb.save_brain(master_brain_mean_file, master_brain_mean)
    print('Saved mean brain {}'.format(master_brain_mean_file))

    # How many volumes?
    num_vols = np.shape(master_brain)[-1]

    # Clear memory
    master_brain = None
    master_brain_mean = None
    time.sleep(5)

    ### Make subfolder if it doesn't exist
    subfolder = 'motcorr'
    motcorr_directory = os.path.join(path, subfolder)
    if not os.path.exists(motcorr_directory):
        os.makedirs(motcorr_directory)

    ### Start fleet of motcorr_partial.sh, giving each the correct portion of data
    # num_vols = 5  # can do this to test
    step = 100  # can reduce this for testing
    job_ids = []
    for i in range(0, num_vols, step):
        vol_start = i
        vol_end = i + step

        # handle last section
        if vol_end > num_vols:
            vol_end = num_vols

        ### SUBMIT JOB ###
        jobid = subprocess.check_output(
            'sbatch motcorr_partial.sh {} {} {} {} {} {} {}'.format(
                path, motcorr_directory, master_brain_path, slave_brain_path,
                master_brain_mean_file, vol_start, vol_end),
            shell=True)

        # Get job ids so we can use them as dependencies
        jobid_str = jobid.decode('utf-8')
        jobid_str = [x for x in jobid_str.split() if x.isdigit()][0]
        print('jobid: {}'.format(jobid_str))
        job_ids.append(jobid_str)

    ### Start motcorr_stitcher.sh with dependencies on all jobs above finishing ###
    # Create weird job string slurm wants
    job_ids_colons = ':'.join(job_ids)
    print('Colons: {}'.format(job_ids_colons))
    os.system('sbatch --dependency=afterany:{} motcorr_stitcher.sh {}'.format(
        job_ids_colons, motcorr_directory))
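# Hedged sketch (hypothetical helper, not in bbb or the original scripts):
# the submit-and-parse-job-id pattern used above, factored into one function.
# It relies only on sbatch's standard "Submitted batch job <id>" output line.
import subprocess

def submit_and_get_jobid(cmd):
    """Run an sbatch command string and return the numeric job id as a string."""
    out = subprocess.check_output(cmd, shell=True).decode('utf-8')
    return [tok for tok in out.split() if tok.isdigit()][0]

# Example usage (paths are placeholders):
# jobid = submit_and_get_jobid('sbatch motcorr_partial.sh <args...>')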
def main(args):
    print('Stitcher started.')
    directory = args.directory
    print('directory: {}'.format(directory))
    if args.datadir:
        directory = datadir_appender.datadir_appender(directory)

    # directory will contain motcorr_green_x.nii and motcorr_red_x.nii
    # get list of reds and greens
    reds = []
    greens = []
    for item in os.listdir(directory):
        # sanity check that it is .nii
        if '.nii' in item:
            if 'red' in item:
                reds.append(item)
            elif 'green' in item:
                greens.append(item)

    # need to order correctly for correct stitching
    bbb.sort_nicely(greens)
    bbb.sort_nicely(reds)

    # add directory path
    reds = [os.path.join(directory, x) for x in reds]
    greens = [os.path.join(directory, x) for x in greens]

    if args.channels == 'rg':
        colors = ['red', 'green']
        channels = [reds, greens]
        print('Using red and green channels.')
    elif args.channels == 'r':
        colors = ['red']
        channels = [reds]
        print('Using red channel.')
    elif args.channels == 'g':
        colors = ['green']
        channels = [greens]
        print('Using green channel.')
    elif args.channels is None:
        colors = ['red', 'green']
        channels = [reds, greens]
        print('Using red and green channels.')

    ### load brains ###
    # This part is based on the input argparse
    for i, channel in enumerate(channels):
        brains = []
        for brain_file in channel:
            brain = bbb.load_numpy_brain(brain_file)

            # Handle edgecase of single volume brain
            if len(np.shape(brain)) == 3:
                brain = brain[:, :, :, np.newaxis]
            print('shape of partial brain: {}'.format(np.shape(brain)))
            brains.append(brain)

        print('brains len: {}'.format(len(brains)))
        stitched_brain = np.concatenate(brains, axis=-1)
        print('stitched_brain shape: {}'.format(np.shape(stitched_brain)))
        save_file = os.path.join(directory, 'stitched_brain_{}.nii'.format(colors[i]))
        bbb.save_brain(save_file, stitched_brain)
        stitched_brain = None

        # delete partial brains
        [os.remove(file) for file in channel]

    ### Stitch motcorr params and create motcorr graph
    # get motcorr param files
    motcorr_param_files = []
    for item in os.listdir(directory):
        if '.npy' in item:
            file = os.path.join(directory, item)
            motcorr_param_files.append(file)
    bbb.sort_nicely(motcorr_param_files)

    # Load motcorr param files (needed to sort first)
    motcorr_params = []
    for file in motcorr_param_files:
        motcorr_params.append(np.load(file))

    if len(motcorr_params) > 0:
        stitched_params = np.concatenate(motcorr_params, axis=0)
        save_file = os.path.join(directory, 'motcorr_params_stitched')
        np.save(save_file, stitched_params)
        [os.remove(file) for file in motcorr_param_files]

        xml_dir = os.path.join(os.path.split(directory)[0], 'imaging')
        print('directory: {}'.format(directory))
        print('xml_dir: {}'.format(xml_dir))
        sys.stdout.flush()
        bbb.save_motion_figure(stitched_params, xml_dir, directory)
    else:
        print('Empty motcorr params - skipping saving moco figure.')

    ### START Z-SCORING ###
    os.system("sbatch zscore.sh {}".format(directory))
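# Hedged sketch (illustration only): why the partial files must be ordered
# before stitching. A plain lexicographic sort puts motcorr_red_10 before
# motcorr_red_2; a natural sort, which is what bbb.sort_nicely is assumed to
# do, orders by the embedded volume index instead.
import re

def natural_sort(items):
    key = lambda s: [int(t) if t.isdigit() else t.lower() for t in re.split(r'(\d+)', s)]
    return sorted(items, key=key)

files = ['motcorr_red_10.nii', 'motcorr_red_2.nii', 'motcorr_red_1.nii']
print(sorted(files))        # ['motcorr_red_1.nii', 'motcorr_red_10.nii', 'motcorr_red_2.nii']
print(natural_sort(files))  # ['motcorr_red_1.nii', 'motcorr_red_2.nii', 'motcorr_red_10.nii']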