def run_over_subjects(print_only=False, use_scaled_masks=False):
    """Run the SPM -> FreeSurfer mapping pipeline for every subject under SPM_ROOT.

    Parameters
    ----------
    print_only : bool
        Forwarded to ``run``; presumably makes it only print the commands
        instead of executing them (confirm against ``run``'s definition).
    use_scaled_masks : bool
        If True, use the ``*_scaled`` variant of each subject's SPM mask
        (as produced by ``scale_masks``).
    """
    for subject_fol in utils.get_subfolders(SPM_ROOT):
        subject = utils.namebase(subject_fol)
        spm_brain_file = SPM_BRAIN_TEMPLATE.format(subject=subject.upper())
        reg_file = '{}_register.lta'.format(subject)
        reg_spm_brain = '{}_reg.mgz'.format(subject)
        # Fix: the original indexed glob.glob(...)[0] directly, which raises
        # IndexError for any subject folder without a spmT_*.nii map. Skip
        # such subjects with a message instead of crashing the whole loop.
        spm_maps = glob.glob(os.path.join(subject_fol, 'spmT_*.nii'))
        if not spm_maps:
            print('No spmT_*.nii map found for {}, skipping'.format(subject))
            continue
        spm_map = os.path.basename(spm_maps[0])
        spm_mask = SPM_MASK_TEMPLATE.format(subject=subject.upper())
        if use_scaled_masks:
            spm_mask_name, spm_mask_type = os.path.splitext(spm_mask)
            spm_mask = '{}_scaled{}'.format(spm_mask_name, spm_mask_type)
        spm_map_masked = '{}_masked.mgz'.format(os.path.splitext(spm_map)[0])
        # hemi is left as a literal '{hemi}' placeholder for ``run`` to fill in.
        fs_hemi_map = FS_HEMI_MAP_TEMPLATE.format(subject=subject, hemi='{hemi}')
        run(subject_fol, spm_brain_file, FS_BRAIN_FILE, reg_file, reg_spm_brain,
            spm_map, spm_mask, spm_map_masked, fs_hemi_map,
            fs_subject=FS_SUBJECT, print_only=print_only)
def run_over_subjects(print_only=False, use_scaled_masks=False):
    """Invoke ``run`` once per subject directory found under SPM_ROOT.

    ``print_only`` is passed straight through to ``run``; ``use_scaled_masks``
    switches the pipeline to the ``_scaled`` variant of each subject's SPM mask.
    """
    for folder in utils.get_subfolders(SPM_ROOT):
        name = utils.namebase(folder)
        subject_upper = name.upper()
        brain = SPM_BRAIN_TEMPLATE.format(subject=subject_upper)
        lta = '%s_register.lta' % name
        registered_brain = '%s_reg.mgz' % name
        # First spmT_*.nii file in the subject folder is the map of interest.
        t_map_path = glob.glob(os.path.join(folder, 'spmT_*.nii'))[0]
        t_map = os.path.basename(t_map_path)
        mask = SPM_MASK_TEMPLATE.format(subject=subject_upper)
        if use_scaled_masks:
            root, ext = os.path.splitext(mask)
            mask = '%s_scaled%s' % (root, ext)
        masked_map = '%s_masked.mgz' % os.path.splitext(t_map)[0]
        # Keep '{hemi}' as a literal placeholder; ``run`` substitutes it later.
        hemi_map = FS_HEMI_MAP_TEMPLATE.format(subject=name, hemi='{hemi}')
        run(folder, brain, FS_BRAIN_FILE, lta, registered_brain,
            t_map, mask, masked_map, hemi_map,
            fs_subject=FS_SUBJECT, print_only=print_only)
def scale_masks(scale=10):
    """Write a ``*_scaled`` copy of every subject's SPM mask, multiplied by *scale*.

    Parameters
    ----------
    scale : int or float
        Factor applied voxel-wise to the mask data (default 10).
    """
    import numpy as np  # local import so the fix doesn't touch module-level imports

    for subject_fol in utils.get_subfolders(SPM_ROOT):
        subject = utils.namebase(subject_fol)
        spm_mask = SPM_MASK_TEMPLATE.format(subject=subject.upper())
        mask_root, mask_ext = os.path.splitext(spm_mask)
        spm_scaled_mask = os.path.join(
            subject_fol, '{}_scaled{}'.format(mask_root, mask_ext))
        img = nib.load(os.path.join(subject_fol, spm_mask))
        # Fix: img.get_data() and img.get_affine() were deprecated and then
        # removed from nibabel. np.asanyarray(img.dataobj) is the documented
        # exact replacement for get_data() (preserves the on-disk dtype, unlike
        # get_fdata()), and the affine is exposed as the ``affine`` attribute.
        scaled_data = np.asanyarray(img.dataobj) * scale
        new_img = nib.Nifti1Image(scaled_data, img.affine)
        nib.save(new_img, spm_scaled_mask)
def check_colors():
    """Plot one legend entry per hand-picked subject to eyeball the spaced colors.

    Opens an interactive matplotlib window; intended for manual visual
    inspection only, returns nothing.
    """
    # Fix: the original first assigned subjects_folders from
    # utils.get_subfolders(SPM_ROOT) and then immediately overwrote it below;
    # that dead call (and a commented-out color-map alternative) were removed.
    good_subjects = ['pp002', 'pp003', 'pp004', 'pp005', 'pp006']
    subjects_folders = [os.path.join(SPM_ROOT, sub) for sub in good_subjects]
    subjects_colors = utils.get_spaced_colors(len(subjects_folders))
    plt.figure()
    for subject_fol, color in zip(subjects_folders, subjects_colors):
        subject = utils.namebase(subject_fol)
        # One dummy point per subject so the legend shows subject + its color.
        plt.scatter([0], [0], label='{} {}'.format(subject, color), c=color)
    plt.legend()
    plt.show()
def check_colors():
    """Render a scatter legend showing the spaced color assigned to each subject.

    Opens an interactive matplotlib figure for visual inspection; no return value.
    """
    subjects_folders = utils.get_subfolders(SPM_ROOT)
    good_subjects = ["pp002", "pp003", "pp004", "pp005", "pp006"]
    # Restrict to the hand-picked subjects (this overrides the listing above).
    subjects_folders = [os.path.join(SPM_ROOT, name) for name in good_subjects]
    subjects_colors = utils.get_spaced_colors(len(subjects_folders))
    # subjects_colors = utils.arr_to_colors(range(len(subjects_folders)), colors_map='Set1')
    plt.figure()
    for folder, color in zip(subjects_folders, subjects_colors):
        # Dummy point at the origin; the legend entry is what we care about.
        label = "{} {}".format(utils.namebase(folder), color)
        plt.scatter([0], [0], label=label, c=color)
    plt.legend()
    plt.show()
def scale_masks(scale=10):
    """Save a voxel-wise scaled (``*_scaled``) copy of each subject's SPM mask.

    Parameters
    ----------
    scale : int or float
        Multiplier applied to every voxel of the mask volume (default 10).
    """
    import numpy as np  # local import so the fix doesn't touch module-level imports

    for subject_fol in utils.get_subfolders(SPM_ROOT):
        subject = utils.namebase(subject_fol)
        spm_mask = SPM_MASK_TEMPLATE.format(subject=subject.upper())
        root, ext = os.path.splitext(spm_mask)
        spm_scaled_mask = os.path.join(subject_fol, "{}_scaled{}".format(root, ext))
        img = nib.load(os.path.join(subject_fol, spm_mask))
        # Fix: get_data()/get_affine() are removed in current nibabel releases.
        # np.asanyarray(img.dataobj) is the documented drop-in for get_data()
        # (keeps the stored dtype), and ``img.affine`` replaces get_affine().
        scaled_data = np.asanyarray(img.dataobj) * scale
        nib.save(nib.Nifti1Image(scaled_data, img.affine), spm_scaled_mask)
# NOTE(review): this chunk begins and ends mid-statement -- the opening
# ImageDataGenerator(...) call for train_datagen and the closing
# finetune_model.compile(...) call both continue outside the visible region.
    horizontal_flip=args.h_flip, vertical_flip=args.v_flip)  # tail of the train-time augmentation config
# Validation data gets only the model's preprocessing function, no augmentation.
val_datagen = ImageDataGenerator(
    preprocessing_function=preprocessing_function)
# Batched directory iterators for training and validation images.
train_generator = train_datagen.flow_from_directory(TRAIN_DIR,
                                                    target_size=(HEIGHT, WIDTH),
                                                    batch_size=BATCH_SIZE)
validation_generator = val_datagen.flow_from_directory(
    VAL_DIR, target_size=(HEIGHT, WIDTH), batch_size=BATCH_SIZE)
# Save the list of classes for prediction mode later
class_list = utils.get_subfolders(TRAIN_DIR)
utils.save_class_list(class_list, model_name=args.model, dataset_name="")
# Attach new fully-connected head(s) to the (frozen or pre-trained) base model.
finetune_model = utils.build_finetune_model(base_model,
                                            dropout=args.dropout,
                                            fc_layers=FC_LAYERS,
                                            num_classes=len(class_list))
if args.continue_training:
    # Resume from the checkpoint saved for this model name.
    # NOTE(review): assuming the print belongs inside this branch; the
    # collapsed source makes the original indentation ambiguous -- confirm.
    finetune_model.load_weights("./checkpoints/" + args.model + "_model_weights.h5")
    print("load success!")
adam = Adam(lr=0.00001)  # fixed low learning rate, typical for fine-tuning
finetune_model.compile(adam, loss='categorical_crossentropy',
# --- Data pipeline and model setup (script-level statements) ---
# Validation data gets only the model's preprocessing function, no augmentation.
val_datagen = ImageDataGenerator(
    preprocessing_function=preprocessing_function)
# Batched directory iterators; paths are composed by string concatenation,
# so BASE_IMG_DIR presumably ends with a separator -- confirm at its definition.
train_generator = train_datagen.flow_from_directory(
    BASE_IMG_DIR + TRAIN_DIR, target_size=(HEIGHT, WIDTH), batch_size=BATCH_SIZE)
validation_generator = val_datagen.flow_from_directory(
    BASE_IMG_DIR + VAL_DIR, target_size=(HEIGHT, WIDTH), batch_size=BATCH_SIZE)
# Save the list of classes for prediction mode later
class_list = utils.get_subfolders(BASE_IMG_DIR + TRAIN_DIR)
utils.save_class_list(class_list, model_name=args.model)
# Attach new fully-connected head(s) to the pre-trained base model.
finetune_model = utils.build_finetune_model(base_model,
                                            dropout=args.dropout,
                                            fc_layers=FC_LAYERS,
                                            num_classes=len(class_list))
if args.continue_training:
    # Resume from the checkpoint previously saved for this model name.
    finetune_model.load_weights("./checkpoints/" + args.model + "_model_weights.h5")
adam = Adam(lr=0.00001)  # fixed low learning rate, typical for fine-tuning
finetune_model.compile(adam, loss='categorical_crossentropy', metrics=['accuracy'])