def trainColor():
    """Train a U-Net on RGB membrane images with on-the-fly augmentation.

    Builds an augmented (image, label) generator, compiles a 3-channel
    U-Net, and fits it while checkpointing the best weights by training
    loss into the ``checkpoints`` directory.
    """
    import os  # fix: os was used below but never imported in this function
    from model import unet
    from data import trainGenerator
    from keras.callbacks import ModelCheckpoint
    from keras.optimizers import Adam

    # Geometric augmentation applied identically to images and masks.
    data_gen_args = dict(rotation_range=0.2,
                         width_shift_range=0.05,
                         height_shift_range=0.05,
                         shear_range=0.05,
                         zoom_range=0.05,
                         horizontal_flip=True,
                         fill_mode='nearest')
    targetSize = (256, 256)
    myGene = trainGenerator(4, 'data/membrane/train', 'colorImage', 'label',
                            data_gen_args, image_color_mode='rgb',
                            target_size=targetSize, save_to_dir=None)

    model = unet(input_size=targetSize + (3,))  # 3 channels for RGB input
    model.compile(optimizer=Adam(lr=1e-4), loss='binary_crossentropy',
                  metrics=['accuracy'])

    chckPtsDir = 'checkpoints'
    os.makedirs(chckPtsDir, exist_ok=True)
    # Checkpoint filenames encode epoch, loss and accuracy.
    chckPtsPath = os.path.join(
        chckPtsDir, 'unet_membrane_{epoch}_{loss:.3f}_{acc:.3f}.hdf5')
    model_checkpoint = ModelCheckpoint(chckPtsPath, monitor='loss',
                                       verbose=1, save_best_only=True)
    model.fit_generator(myGene, steps_per_epoch=2000, epochs=17,
                        callbacks=[model_checkpoint])
def train(args):
    """Fit a U-Net on the training split under ``args.root``.

    The best weights (lowest training loss) are written to
    ``args.save_path`` by a ModelCheckpoint callback.
    """
    # Shared augmentation settings for images and their masks.
    augmentation = dict(rotation_range=0.2,
                        width_shift_range=0.05,
                        height_shift_range=0.05,
                        shear_range=0.05,
                        zoom_range=0.05,
                        horizontal_flip=True,
                        fill_mode='nearest')

    # Generator yielding augmented (image, label) batches of size 2.
    train_gen = trainGenerator(2, os.path.join(args.root, 'train'),
                               'image', 'label', augmentation,
                               target_size=(512, 512), save_to_dir=None)

    net = unet()
    checkpoint = ModelCheckpoint(args.save_path, monitor='loss',
                                 verbose=1, save_best_only=True)
    net.fit_generator(train_gen, steps_per_epoch=2000, epochs=5,
                      callbacks=[checkpoint])
def train_unet(cfg, resume=True):
    """Train (or resume training of) a U-Net described by ``cfg``.

    Args:
        cfg: configuration object providing input_size, pretrained_fpath,
            model_dpath, loss, train_dpath, lr, epochs, mfpath and
            get_lastepoch().
        resume: when True and ``cfg.pretrained_fpath`` exists, weights are
            loaded from it before training continues.
    """
    input_size = cfg.input_size
    pretrained_fpath = cfg.pretrained_fpath
    model_dpath = cfg.model_dpath
    loss = cfg.loss
    train_dpath = cfg.train_dpath
    lr = cfg.lr
    epochs = cfg.epochs
    # e.g. model_dpath + '/ep{epoch:03d}-loss{loss:.3f}-acc{accuracy:.3f}.h5'
    mfpath = cfg.mfpath
    last_epoch = cfg.get_lastepoch()
    print(last_epoch)  # NOTE(review): leftover debug output; consider logging

    # (1) Build the augmented training generator.
    augdict = dict(rotation_range=0.2,
                   width_shift_range=0.05,
                   height_shift_range=0.05,
                   shear_range=0.05,
                   zoom_range=0.05,
                   horizontal_flip=True,
                   fill_mode='nearest')
    train_gen = trainGenerator(4, train_dpath, 'image', 'label', augdict,
                               save_to_dir=None)
    # fix: removed a dead `if False:` block that drained three batches from
    # the generator — unreachable debug code.

    # (2) Compile the model and optionally load pretrained weights.
    model = myunet(input_size)
    model.compile(optimizer=Adam(lr=lr), loss=loss, metrics=['accuracy'])

    if resume and pretrained_fpath and os.path.exists(pretrained_fpath):
        try:
            model.load_weights(pretrained_fpath)
        except Exception as exc:
            # Best-effort resume: incompatible weights fall back to scratch.
            print("Exception: {}".format(exc))

    # (3) Train, checkpointing on best loss and logging to TensorBoard.
    model_checkpoint = ModelCheckpoint(mfpath, monitor='loss', verbose=1,
                                       save_best_only=True)
    model_tb = keras.callbacks.TensorBoard(log_dir=model_dpath,
                                           histogram_freq=0,
                                           write_graph=False,
                                           write_images=False)
    model.fit_generator(train_gen, steps_per_epoch=1000, epochs=epochs,
                        callbacks=[model_checkpoint, model_tb],
                        initial_epoch=last_epoch)
    model.save(pretrained_fpath)
def trainGrayscale():
    """Train a single-channel U-Net on the membrane dataset.

    Checkpoints (named with epoch/loss/accuracy) are written whenever the
    training loss improves.
    """
    from model import unet
    from data import trainGenerator
    from keras.callbacks import ModelCheckpoint
    from keras.optimizers import Adam

    # Augmentation applied jointly to grayscale images and masks.
    augmentation = dict(rotation_range=0.2,
                        width_shift_range=0.05,
                        height_shift_range=0.05,
                        shear_range=0.05,
                        zoom_range=0.05,
                        horizontal_flip=True,
                        fill_mode='nearest')

    generator = trainGenerator(4, 'data/membrane/train', 'image', 'label',
                               augmentation, save_to_dir=None)

    net = unet()
    net.compile(optimizer=Adam(lr=1e-4), loss='binary_crossentropy',
                metrics=['accuracy'])
    checkpoint = ModelCheckpoint(
        'unet_membrane_{epoch}_{loss:.3f}_{acc:.3f}.hdf5',
        monitor='loss', verbose=1, save_best_only=True)
    net.fit_generator(generator, steps_per_epoch=500, epochs=5,
                      callbacks=[checkpoint])
def testFlow():
    """Smoke-test the multi-class training generator.

    Pulls four batches from ``trainGenerator`` configured for 6-class RGB
    masks at 16x16 to verify the pipeline yields without errors.
    """
    from data import trainGenerator

    data_gen_args = dict(rotation_range=0.2,
                         width_shift_range=0.05,
                         height_shift_range=0.05,
                         shear_range=0.05,
                         zoom_range=0.05,
                         horizontal_flip=True,
                         fill_mode='nearest')
    targetSize = (16, 16)
    myGene = trainGenerator(1, 'data', 'image', 'multi_class_masks',
                            data_gen_args, image_color_mode='rgb',
                            target_size=targetSize, flag_multi_class=True,
                            num_class=6, save_to_dir=None)
    for _ in range(4):
        im, mask = next(myGene)
    # fix: dropped the trailing bare `im` expression — a no-op left over
    # from an interactive session.
def train():
    """Train a single-class U-Net for the 'bush' channel.

    Available classes: 'ground', 'tree', 'bush', 'tower', 'wires',
    'copter', 'car', 'build'.
    """
    channels = ['bush']
    # Pretrained weights are read from — and the best weights written
    # back to — the same path.
    pretrained_weights_path = weights_path
    save_weights_path = weights_path

    if not os.path.exists('weights'):
        os.makedirs('weights')

    generator = trainGenerator(channels)
    net = unet(len(channels), pretrained_weights=pretrained_weights_path)
    checkpoint = ModelCheckpoint(save_weights_path, monitor='loss',
                                 verbose=1, save_best_only=True)
    net.fit_generator(generator, steps_per_epoch=600, epochs=1000,
                      callbacks=[checkpoint])
def generate_data(batch_size, train_path, aug_dict, target_size):
    """Build a training data generator and count its samples.

    Args:
        batch_size: int, batch size.
        train_path: str, directory containing 'image' and 'label' folders.
        aug_dict: dict of augmentation arguments.
        target_size: tuple, image shape expected by the model.

    Returns:
        (data_gen, data_n): the generator and the number of '.jpg' files
        found in the 'image' folder.
    """
    data_gen = trainGenerator(batch_size=batch_size,
                              train_path=train_path,
                              image_folder='image',
                              mask_folder='label',
                              aug_dict=aug_dict,
                              target_size=target_size)
    image_dir = os.path.join(train_path, 'image')
    data_n = sum(1 for name in os.listdir(image_dir) if name.endswith('.jpg'))
    return data_gen, data_n
# Paths and run configuration for breast-MRI segmentation training.
path_to_contours = '/home/ianben/Breast_MRI_save/'
runName = '32'
MPU = True  # enable the TF-session / multi-GPU setup below

# Joint image/mask augmentation settings.
data_gen_args = dict(rotation_range=0.2,
                     width_shift_range=0.05,
                     height_shift_range=0.05,
                     shear_range=0.05,
                     zoom_range=0.05,
                     horizontal_flip=True,
                     fill_mode='nearest')
train_path = path_to_contours + 'train/'
# Generator of augmented (image, mask) batches of size 2.
myGene = data.trainGenerator(batch_size=2, train_path=train_path, aug_dict=data_gen_args, save_to_dir=None)
model = models.unet()
model.summary()
# Save weights whenever training loss improves.
model_checkpoint = ModelCheckpoint('unet_checkpoints.hdf5', monitor='loss', verbose=1, save_best_only=True)
if MPU:
    # set up multi gpu processing: TF1-style session with growable GPU memory
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(config=config)
# Download data if needed, then configure a training run.
os.chdir('src')
import mover  # used to download data if needed
os.chdir(home)

batch_size = 1

# Date string — useful when producing many models in a short period.
date = datetime.datetime.now().strftime("%m%d%Y_%H%M%S")

# Existing model to resume from when `new` is False.
mNameOld = r'saved models/final_resampled_3-22-2020.hdf5'

# Set up training and validation dataset generators (see data.py).
print('Setting up dataset use...')
generator = data.trainGenerator(batch_size, train_path='data\\train' + sourceparam, image_folder='image', gt_folder='label', subset='training')
# Save name for the new model.
mName = 'new_model.hdf5'
mName = r'saved models/' + mName
valGen = data.valGenerator(sourceparam)

#%% model training
if new:
    Model = model.unet()  # see model.py, initialize u-net architecture
else:
    Model = model.unet(mNameOld)  # resume from the saved weights

# Keras callbacks used in the training call.
# NOTE(review): this statement is cut off in the captured source — the
# ModelCheckpoint call continues beyond this point.
model_checkpoint = ModelCheckpoint(
    mName,
    monitor='loss',
    verbose=2,
#test_image_num = len(os.listdir(fileDir)) data_gen_args = dict( rotation_range=90., #width_shift_range=0.1, #height_shift_range=0.1, #shear_range=0.1, #zoom_range=0.1, fill_mode='nearest', #horizontal_flip = True, #vertical_flip = True ) train_Gene = trainGenerator(8, 'data/flower/train', 'image_crops', 'mask_crops', data_gen_args, save_to_dir=None) val_Gene = trainGenerator(8, 'data/flower/test', 'images_crops', 'masks_crops', data_gen_args) reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, verbose=0, mode='min', epsilon=1e-4, cooldown=0, min_lr=1e-6) visual = TensorBoard(log_dir='./unet_log', histogram_freq=0,
# Input/output locations for the water-detection U-Net.
path_input = "images/data"
path_mask = "images/data"
path_test_result = "images/test"

# Joint augmentation for images and masks.
data_gen_args = dict(rotation_range=0.2,
                     width_shift_range=0.05,
                     height_shift_range=0.05,
                     shear_range=0.05,
                     zoom_range=0.05,
                     horizontal_flip=True,
                     fill_mode='nearest')

# Batches of 2; augmented samples are also written to images/aug.
myGene = trainGenerator(2, path_input, 'images_splited', 'processed_images_splited', data_gen_args, save_to_dir='images/aug')

# Resume from, and checkpoint back to, the same weights file.
model = unet('water_derection_unet_weights.hdf5')
model_checkpoint = ModelCheckpoint('water_derection_unet_weights.hdf5', monitor='loss', verbose=1, save_best_only=True)
model.fit_generator(myGene, steps_per_epoch=300, epochs=3, callbacks=[model_checkpoint])

# testGene = testGenerator(path_test_result,5)
# results = model.predict_generator(testGene,5,verbose=1)
data_gen_args = dict(samplewise_std_normalization=True, samplewise_center=True, rotation_range=25, width_shift_range=0.05, height_shift_range=0.05, shear_range=5, zoom_range=0.5, horizontal_flip=True, fill_mode='nearest') #Criam-se os dados de treino e de validação #Para gravar estes dados definir uma localização para as variáveis save_to_dir, #save_to_dir_imageval e save_to_dir_labelval trainGene = trainGenerator(batch_size, 'data/train/epicardio', 'image', 'label', data_gen_args, save_to_dir=None) validationGene = validationGenerator(batch_size, 'data/validation/epicardio', 'image', 'label', data_gen_args, save_to_dir_imageval=None, save_to_dir_labelval=None) #Keras callbacks model_checkpoint = ModelCheckpoint('unet_epicardio.hdf5', monitor='val_loss', verbose=1, save_best_only=True)
def train():
    """Run one full training session configured via environment variables.

    Creates date-time-stamped model/log directories, builds the train and
    validation generators, trains the U-Net with CSV logging and per-epoch
    checkpoints, writes model metadata, and returns the date-time string
    identifying this run.
    """
    current_date_time = get_current_date_time()

    # create directories for this run's models and logs
    create_directory('models/{}'.format(object_type), format=False)
    create_directory('models/{}/models_{}'.format(object_type, current_date_time))
    create_directory('models/{}/models_{}/logs'.format(object_type, current_date_time))

    # data generator parameters — augmentation is opt-in via AUGMENT_FLAG
    if os.getenv('AUGMENT_FLAG') == 'TRUE':
        data_gen_args = dict(
            rotation_range=0.2,
            width_shift_range=0.1,
            height_shift_range=0.1,
            shear_range=0.1,
            zoom_range=0.1,
            horizontal_flip=True,
            vertical_flip=True,
            # fill_mode='nearest'
        )
    else:
        data_gen_args = dict()

    # define training hyper-parameters from the environment
    epochs = int(os.getenv('EPOCHS'))
    train_dataset_size = int(os.getenv('TRAIN_DATASETSIZE'))
    valid_dataset_size = int(os.getenv('VALID_DATASETSIZE'))
    batch_size = int(os.getenv('BATCHSIZE'))

    # get train data generator; optionally persist augmented samples
    if os.getenv('SAVE_AUGMENTED') == 'TRUE':
        create_directory(os.path.join(dataset_path, 'augmented'))
        create_directory(os.path.join(dataset_path, 'augmented/images'))
        create_directory(os.path.join(dataset_path, 'augmented/masks'))
        save_to_dir = os.path.join(dataset_path, 'augmented')
    else:
        save_to_dir = None
    traingen = trainGenerator(batch_size, os.path.join(dataset_path, 'train'), 'images', 'masks', data_gen_args, save_to_dir=save_to_dir)

    # get validation data generator (never written to disk)
    validgen = trainGenerator(batch_size, os.path.join(dataset_path, 'valid'), 'images', 'masks', data_gen_args, save_to_dir=None)

    # instantiate model
    model = unet()

    # define callbacks: CSV training log plus per-epoch checkpoints
    csv_logger = CSVLogger(
        'models/{}/models_{}/logs/training_logs_{}.csv'.format(
            object_type, current_date_time, current_date_time),
        append=True, separator=';')
    model_checkpoint = ModelCheckpoint('models/{}/models_{}/{}'.format(
        object_type, current_date_time,
        'weights.epoch_{epoch:02d}-valloss_{val_loss:.2f}.hdf5'),
        monitor="val_loss", verbose=0, save_best_only=False,
        save_weights_only=False, period=1)

    # start training
    print('\nStarting training ...')
    model.fit_generator(traingen, steps_per_epoch=train_dataset_size // batch_size, epochs=epochs, validation_data=validgen, validation_steps=valid_dataset_size // batch_size, callbacks=[model_checkpoint, csv_logger])
    print('DONE !')

    # save model details in json format
    print('\nWriting models metadata ...')
    save_model_metadata(model, current_date_time)
    print('DONE !')

    return current_date_time
from data import trainGenerator, testGenerator, saveResult
from model import unet, ModelCheckpoint

# Joint augmentation for raw EM images and membrane ground truth.
data_gen_args = dict(
    rotation_range=0.2,
    width_shift_range=0.05,
    height_shift_range=0.05,
    shear_range=0.05,
    zoom_range=0.05,
    horizontal_flip=True,
    fill_mode='nearest'
)

# Training generator: batches of 2 at 512x512.
myGene = trainGenerator(
    2,
    '/g/schwab/hennies/teaching/datasets/em_gt/',
    'raw',
    'mem_gt',
    data_gen_args,
    save_to_dir=None,
    target_size=(512, 512)
)

# Train a single-channel U-Net, keeping the best weights by training loss.
model = unet(input_size=(512, 512, 1))
model_checkpoint = ModelCheckpoint('unet_membrane.h5', monitor='loss', verbose=1, save_best_only=True)
model.fit_generator(myGene, steps_per_epoch=100, epochs=3, callbacks=[model_checkpoint])

# Inference: rebuild the model, load the best weights, predict on 64 test
# images, and save the predictions.
testGene = testGenerator('/g/schwab/hennies/teaching/datasets/em_test/raw/', num_image=64, target_size=(512, 512))
model = unet(input_size=(512, 512, 1))
model.load_weights('unet_membrane.h5')
results = model.predict_generator(testGene, 64, verbose=1)
saveResult('/g/schwab/hennies/teaching/datasets/em_test/result_unet/', results)
from os import chdir
# Work from the unet project directory so relative data paths resolve.
chdir(r"C:\Users\jsalm\Documents\Python Scripts\Automated_Histo\unet-master\unet-master")
from model import *
from data import testGenerator,trainGenerator,saveResult
#os.environ["CUDA_VISIBLE_DEVICES"] = "0"

# Joint augmentation for membrane images and labels.
data_gen_args = dict(rotation_range=0.2,
                     width_shift_range=0.05,
                     height_shift_range=0.05,
                     shear_range=0.05,
                     zoom_range=0.05,
                     horizontal_flip=True,
                     fill_mode='nearest')
# Augmented (image, label) batches of size 2.
myGene = trainGenerator(2,'data/membrane/train','image','label',data_gen_args,save_to_dir = None)
model = unet()
# Save weights whenever training loss improves.
model_checkpoint = ModelCheckpoint('unet_membrane.hdf5', monitor='loss',verbose=1, save_best_only=True)
model.fit_generator(myGene,steps_per_epoch=300,epochs=5,callbacks=[model_checkpoint])

# Predict on the test set and write results next to the inputs.
# NOTE(review): 56657 prediction steps looks unusually large — confirm it
# matches the number of test images.
testGene = testGenerator("data/membrane/test")
results = model.predict_generator(testGene,56657,verbose=1)
saveResult("data/membrane/test",results)
import tensorflow as tf
from model import unet
from data import trainGenerator, testGenerator, saveResult

os.environ["TF_CPP_MIN_LOG_LEVEL"]="2" # This is to filter out TensorFlow INFO and WARNING logs
#os.environ["CUDA_VISIBLE_DEVICES"]="0" # Make 1 GPU visible for training

# Joint augmentation for membrane images and labels.
data_gen_args = dict(rotation_range=0.2,
                     width_shift_range=0.05,
                     height_shift_range=0.05,
                     shear_range=0.05,
                     zoom_range=0.05,
                     horizontal_flip=True,
                     fill_mode='nearest')
# Augmented (image, mask) batches of size 2.
myGene = trainGenerator(batch_size=2, train_path='data/membrane/train', image_folder='image', mask_folder='label', aug_dict=data_gen_args, save_to_dir=None)

model = unet()
# Save weights whenever training loss improves.
model_checkpoint = tf.keras.callbacks.ModelCheckpoint('unet_membrane.hdf5', monitor='loss', verbose=1, save_best_only=True)
model.fit_generator(myGene, steps_per_epoch=2000, epochs=5, callbacks=[model_checkpoint])

# Predict on 30 test images and save the results.
testGene = testGenerator(test_path='data/membrane/test')
results = model.predict_generator(testGene, 30, verbose=1)
saveResult(save_path='data/membrane/test', npyfile=results)
def __init__(self, conf):
    """Load a trained network and build the test data generator.

    Args:
        conf: configuration with model_path, test_data_path, label_path
            and edg_path attributes.
    """
    super(test_model, self).__init__()
    self.conf = conf
    # NOTE(review): torch.load unpickles arbitrary code — ensure
    # model_path comes from a trusted source.
    self.net = torch.load(conf.model_path)
    self.datagen = data.trainGenerator(conf.test_data_path, conf.label_path, conf.edg_path)
import constant_model
import os
from keras.callbacks import ModelCheckpoint
from keras.models import load_model
from save_predict_nii_gz import save_data_dir
from roi import ROI

# Prepare the train/test split on disk.
data.get_train_and_test()

# # model = load_model(constant_model.save_model_name)

# Training generator fully configured by constant_model.
myGenerator = data.trainGenerator(
    batch_size=constant_model.batch_size,
    train_path=constant_model.train_path,
    image_folder=constant_model.classes_image,
    mask_folder=constant_model.classes_label,
    aug_dict=constant_model.data_gen_args,
    flag_multi_class=constant_model.flag_multi_class,
    num_class=constant_model.num_class,
    save_to_dir=constant_model.data_gen_save_to_dir,
    target_size=constant_model.target_size)

# NOTE(review): this rebinds the name `model` from the imported module to
# the network instance; the module is unreachable afterwards.
model = model.unet()
model_checkpoint = ModelCheckpoint(constant_model.save_model_name, monitor='loss', verbose=1, save_best_only=True)
model.fit_generator(myGenerator, steps_per_epoch=constant_model.steps_per_epoch, epochs=constant_model.epochs, callbacks=[model_checkpoint, constant_model.tbCallBack])

# NOTE(review): this statement is cut off in the captured source — the
# testGenerator call continues beyond this point.
testGene = data.testGenerator(constant_model.test_path,
# 数据生成器参数 data_gen_args = dict( rotation_range=15, # 随机旋转角度 width_shift_range=0.05, height_shift_range=0.05, shear_range=0.05, # 剪切强度(以弧度逆时针方向剪切角度) zoom_range=0.05, # 随机缩放范围。 horizontal_flip=True, # 随机水平翻转 fill_mode='nearest', rescale=1.0 / 255) # 填充模式 myGene = trainGenerator( 4, '../data/train', data_gen_args, image_color_mode="rgb", save_to_dir=None, target_size=TARGET_INPUT_SIZE) # save_to_dir = 'data/trainGenerator' # print(myGene) # test_datagen = ImageDataGenerator(rescale=1. / 255) # validation_generator = test_datagen.flow_from_directory( # '../data/test', # target_size=TARGET_INPUT_SIZE, # batch_size=4, # classes=['0', '1','2'], # class_mode='categorical', # color_mode="rgb") # print(myGene)
# List available devices (sanity check that a GPU is visible).
from tensorflow.python.client import device_lib
device_lib.list_local_devices()

import data, model

model_obj = model.unet()

# Aggressive geometric augmentation for stomata images and labels.
data_gen_args = dict(rotation_range=180,
                     width_shift_range=0,
                     height_shift_range=0,
                     shear_range=45,
                     zoom_range=0.2,
                     horizontal_flip=True,
                     fill_mode='reflect')
# Batches of 1; augmented samples are also written to the aug folder.
myGene = data.trainGenerator(
    1, '/content/drive/My Drive/Stomata_Project/training_data/train',
    'image', 'label', data_gen_args,
    save_to_dir=
    '/content/drive/My Drive/Stomata_Project/training_data/train/aug')

from keras.callbacks import ModelCheckpoint
import matplotlib.pyplot as plt

# Keep only the best weights by training loss.
model_checkpoint = ModelCheckpoint(
    '/content/drive/My Drive/Stomata_Project/training_data/unet_stomata_steps_100_epochs_6.hdf5',
    monitor='loss', verbose=1, save_best_only=True)
history = model_obj.fit_generator(myGene, steps_per_epoch=100, epochs=6, callbacks=[model_checkpoint])
# Ensure the experiment output folder exists.
if not os.path.exists(experiment_folder):
    os.mkdir(experiment_folder)

dataset_folder = '/g/schwab/hennies/phd_project/image_analysis/autoseg/membrane_predictions'

# Joint augmentation for membrane predictions and ground truth.
data_gen_args = dict(rotation_range=0.2,
                     width_shift_range=0.05,
                     height_shift_range=0.05,
                     shear_range=0.05,
                     zoom_range=0.05,
                     horizontal_flip=True,
                     fill_mode='nearest')
# Batches of 2 at 512x512.
myGene = trainGenerator(2, os.path.join(dataset_folder, 'em_gt'), 'mem_pred', 'mem_gt_2', data_gen_args, save_to_dir=None, target_size=(512, 512))

# Resume from, and checkpoint back to, the same weights file.
model = unet(input_size=(512, 512, 1), pretrained_weights=os.path.join(experiment_folder, 'unet_membrane.h5'))
model_checkpoint = ModelCheckpoint(os.path.join(experiment_folder, 'unet_membrane.h5'), monitor='loss', verbose=1, save_best_only=True)
# NOTE(review): this statement is cut off in the captured source — the
# fit_generator call continues beyond this point.
model.fit_generator(myGene, steps_per_epoch=2000, epochs=5,
import tensorflow as tf

# Report whether a GPU is available.
if tf.test.gpu_device_name():
    print('Default GPU Device: {}'.format(tf.test.gpu_device_name()))
else:
    print("Please install GPU version of tensorflow")

# GENERATE DATA: joint augmentation for images and labels.
data_gen_args = dict(rotation_range=0.2,
                     width_shift_range=0.05,
                     height_shift_range=0.05,
                     shear_range=0.05,
                     zoom_range=0.05,
                     horizontal_flip=True,
                     fill_mode='reflect')
myGene = data.trainGenerator(2,train_dir,'image',label_dir,data_gen_args,save_to_dir = None)

# CREATE U-NET  a) from scratch
def create_unet(myGene):
    """Train a fresh U-Net on ``myGene``; save best weights and history."""
    mdl = model.unet()
    model_checkpoint = ModelCheckpoint('../model/unet_membrane.hdf5', monitor='loss',verbose=1, save_best_only=True)
    history = mdl.fit_generator(myGene,steps_per_epoch=steps,epochs=epochs,callbacks=[model_checkpoint])
    print(history.history['loss'])
    # save the training history
    #with open('../model/history.json', 'w') as f:
    #json.dump(history.history, f)
    data.saveHist('../model/history.json', history)
    return mdl, history.history

# b) from a file (definition continues beyond the captured source)
def load_unet():