def main():
    """Train the siamese tracking model on the 'nuclear_movie_same' dataset.

    Builds the model and hands everything to ``train_model_siamese``.
    Relies on names imported elsewhere in this file (``np``, ``SGD``,
    ``rate_scheduler``, ``the_model``, ``train_model_siamese``).
    """
    # NOTE(review): this block was whitespace-mangled in SOURCE (collapsed to
    # one line). Statements are restored at the obvious boundaries; tokens
    # are unchanged except that repeated string literals now reuse the
    # variables holding the identical strings.
    direc_data = '/data/npz_data/cells/unspecified_nuclear_data/nuclear_movie/'
    dataset = 'nuclear_movie_same'

    # Loaded but never used below -- train_model_siamese reloads the data
    # itself from direc_data/dataset. Presumably dead code; kept for parity.
    training_data = np.load('{}{}.npz'.format(direc_data, dataset))

    optimizer = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
    lr_sched = rate_scheduler(lr=0.01, decay=0.99)

    in_shape = (14, 14, 1)
    model = the_model(input_shape=in_shape)  # , n_features=1, reg=1e-5)

    train_model_siamese(
        model=model,
        dataset=dataset,
        optimizer=optimizer,
        expt='',
        it=0,
        batch_size=1,
        n_epoch=100,
        direc_save='/data/models/cells/unspecified_nuclear_data/nuclear_movie',
        direc_data=direc_data,
        lr_sched=lr_sched,
        rotation_range=0,
        flip=True,
        shear=0,
        class_weight=None)
# NOTE(review): whitespace-mangled training-script fragment (original
# newlines lost). Because the line now begins with '#', the whole line is
# syntactically a comment; the trailing train_model(...) call is also
# truncated mid-argument-list ('rotation_range=180,'), so the code is kept
# byte-for-byte rather than reformatted. Reads "class_weights" out of the
# loaded .npz and trains a (1, 512, 512, 1) softmax model -- presumably one
# of several per-dataset entry points; confirm against the original repo.
# Create output directory, if necessary pathlib.Path(direc_save).mkdir(parents=True, exist_ok=True) optimizer = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) lr_sched = rate_scheduler(lr=0.01, decay=0.99) file_name = os.path.join(direc_data, dataset + ".npz") training_data = np.load(file_name) class_weights = training_data["class_weights"] for iterate in range(1): model = the_model(batch_shape=(1, 512, 512, 1), n_features=3, reg=1e-5, softmax=True, permute=True) train_model(model=model, dataset=dataset, optimizer=optimizer, expt=expt, it=iterate, batch_size=batch_size, n_epoch=n_epoch, direc_save=direc_save, direc_data=direc_data, lr_sched=lr_sched, class_weight=class_weights, rotation_range=180,
# NOTE(review): whitespace-mangled training-script fragment; the trailing
# train_model(...) call is truncated ('shear=False,'), so the code is kept
# byte-for-byte rather than reformatted. Trains a 61x61 cytoplasm feature
# net. Uses Python-2 'xrange' while other fragments in this file use
# Python-3 'pathlib'/'range' -- TODO confirm the intended Python version.
n_epoch = 25 data_format = "channels_first" dataset = "cytoplasm_61x61" expt = "bn_feature_net_61x61" direc_save = "/home/davince/Dropbox (OIST)/deepcell-tf-master/trained_networks/20180330_cytoplasm_raw/" direc_data = "/home/davince/Dropbox (OIST)/deepcell-tf-master/training_data_npz/20180401_newdata_Raw/" optimizer = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) lr_sched = rate_scheduler(lr=0.01, decay=0.99) class_weights = {0: 1, 1: 1, 2: 1} for iterate in xrange(3): model = the_model(n_channels=1, n_features=3, reg=1e-5) train_model(model=model, dataset=dataset, optimizer=optimizer, expt=expt, it=iterate, batch_size=batch_size, n_epoch=n_epoch, direc_save=direc_save, direc_data=direc_data, lr_sched=lr_sched, class_weight=class_weights, rotation_range=180, flip=True, shear=False,
# NOTE(review): whitespace-mangled training-script fragment; the mid-line
# '#' swallows the rest of the line and the train_model(...) call is
# truncated ('rotation_range=180,'), so the code is kept byte-for-byte
# rather than reformatted. Builds a 5-frame movie model with
# batch_shape=(1, 1, 5, 256, 256) and passes number_of_frames=5 -- presumably
# a 3D/temporal variant of the other per-dataset entry points; confirm
# against the original repository before editing.
direc_data = "/data/training_data_npz/nuclear_movie/" # Create output ditrectory, if necessary pathlib.Path(direc_save).mkdir(parents=True, exist_ok=True) optimizer = SGD(lr=1e-2, decay=1e-6, momentum=0.9, nesterov=True) lr_sched = rate_scheduler(lr=1e-2, decay=0.99) file_name = os.path.join(direc_data, dataset + ".npz") training_data = np.load(file_name) for iterate in range(1): model = the_model(batch_shape=(1, 1, 5, 256, 256), n_features=3, reg=1e-5, location=False, permute=True, softmax=False) trained_model = train_model(model=model, dataset=dataset, optimizer=optimizer, expt=expt, it=iterate, batch_size=batch_size, n_epoch=n_epoch, direc_save=direc_save, direc_data=direc_data, number_of_frames=5, lr_sched=lr_sched, rotation_range=180,
# NOTE(review): whitespace-mangled training-script fragment; the trailing
# train_model(...) call is truncated ('rotation_range=180,'), so the code is
# kept byte-for-byte rather than reformatted. Trains a channels-first
# (1, 512, 512) softmax model with class weights read from the .npz.
# Uses Python-2 'xrange' while other fragments use Python-3 constructs --
# TODO confirm the intended Python version.
direc_save = "/data/trained_networks/nuclei/" direc_data = "/data/training_data_npz/nuclei/" optimizer = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) lr_sched = rate_scheduler(lr=0.01, decay=0.99) file_name = os.path.join(direc_data, dataset + ".npz") training_data = np.load(file_name) class_weights = training_data["class_weights"] for iterate in xrange(1): model = the_model(input_shape=(1, 512, 512), n_features=3, reg=1e-5, softmax=True, permute=True) train_model(model=model, dataset=dataset, optimizer=optimizer, expt=expt, it=iterate, batch_size=batch_size, n_epoch=n_epoch, direc_save=direc_save, direc_data=direc_data, lr_sched=lr_sched, class_weight=class_weights, rotation_range=180,
# NOTE(review): whitespace-mangled training-script fragment. The original
# newline positions are unrecoverable, so it is impossible to tell which of
# the assignments following each '#' (direc_save / direc_data, the SGD
# optimizer, file_name) were originally commented out -- as collapsed here,
# everything after the first '#' on each original line is dead text, and
# 'file_name' is consumed by np.load without a visible live assignment.
# Kept byte-for-byte; reconstruct from the original repository, do not guess.
# Also uses Python-2 'xrange' -- TODO confirm intended Python version.
batch_size = 1 n_epoch = 200 dataset = "nuclei_conv_61x61" expt = "retina_net" # direc_save = "/data/trained_networks/nuclei/" direc_data= "/data/training_data_npz/nuclei/" optimizer = Adam(lr=1e-5, clipnorm=0.001) # optimizer = SGD(lr = 0.01, momentum = 0.9, nesterov = True) lr_sched = rate_scheduler(lr = 1e-5, decay = 0.99) # file_name = os.path.join(direc_data, dataset + ".npz") training_data = np.load(file_name) for iterate in xrange(1): model = the_model(num_classes = 1, input_shape = (1,512,512)) trained_model = train_model(model = model, dataset = dataset, optimizer = optimizer, expt = expt, it = iterate, batch_size = batch_size, n_epoch = n_epoch, direc_save = direc_save, direc_data = direc_data, lr_sched = lr_sched, rotation_range = 0, flip = False, shear = False)
# NOTE(review): whitespace-mangled training-script fragment; the trailing
# train_model(...) call is truncated ('rotation_range=180,'), so the code is
# kept byte-for-byte rather than reformatted. Variant with n_features=16,
# softmax=False, location=True on batch_shape=(1, 1, 512, 512); class
# weights come from the .npz. Uses Python-2 'xrange' -- TODO confirm the
# intended Python version.
direc_save = "/data/trained_networks/nuclei_broad/" direc_data = "/data/training_data_npz/nuclei_broad/" optimizer = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) lr_sched = rate_scheduler(lr=0.01, decay=0.99) file_name = os.path.join(direc_data, dataset + ".npz") training_data = np.load(file_name) class_weights = training_data["class_weights"] for iterate in xrange(1): model = the_model(batch_shape=(1, 1, 512, 512), n_features=16, reg=1e-5, softmax=False, location=True, permute=True) train_model(model=model, dataset=dataset, optimizer=optimizer, expt=expt, it=iterate, batch_size=batch_size, n_epoch=n_epoch, direc_save=direc_save, direc_data=direc_data, lr_sched=lr_sched, class_weight=class_weights, rotation_range=180,
# NOTE(review): whitespace-mangled training-script fragment (original
# newlines lost). Because the line now begins with '#', the whole line is
# syntactically a comment; the trailing train_model(...) call is also
# truncated ('rotation_range=180,'), so the code is kept byte-for-byte
# rather than reformatted. Channels-last variant: input_shape=(512, 512, 2)
# with class weights printed before training -- presumably another
# per-dataset entry point; confirm against the original repository.
# Create output directory, if necessary pathlib.Path(direc_save).mkdir(parents=True, exist_ok=True) optimizer = SGD(lr=1e-2, decay=1e-6, momentum=0.9, nesterov=True) lr_sched = rate_scheduler(lr=1e-2, decay=0.99) file_name = os.path.join(direc_data, dataset + ".npz") training_data = np.load(file_name) class_weights = training_data["class_weights"] print(class_weights) for iterate in range(1): model = the_model(input_shape=(512, 512, 2), n_features=3, reg=1e-5, location=False, permute=False) trained_model = train_model(model=model, dataset=dataset, optimizer=optimizer, expt=expt, it=iterate, batch_size=batch_size, n_epoch=n_epoch, direc_save=direc_save, direc_data=direc_data, lr_sched=lr_sched, class_weight=class_weights, rotation_range=180,
# NOTE(review): whitespace-mangled training-script fragment; the trailing
# train_model(...) call is truncated ('flip=True,'), so the code is kept
# byte-for-byte rather than reformatted. HeLa variant: channels-first
# input_shape=(2, 512, 512) with class weights printed before training.
# Uses Python-2 'xrange' while other fragments use Python-3 constructs --
# TODO confirm the intended Python version.
direc_save = "/data/trained_networks/HeLa/" direc_data = "/data/training_data_npz/HeLa/" optimizer = SGD(lr=1e-2, decay=1e-6, momentum=0.9, nesterov=True) lr_sched = rate_scheduler(lr=1e-2, decay=0.99) file_name = os.path.join(direc_data, dataset + ".npz") training_data = np.load(file_name) class_weights = training_data["class_weights"] print(class_weights) for iterate in xrange(1): model = the_model(input_shape=(2, 512, 512), n_features=3, reg=1e-5, permute=True) trained_model = train_model(model=model, dataset=dataset, optimizer=optimizer, expt=expt, it=iterate, batch_size=batch_size, n_epoch=n_epoch, direc_save=direc_save, direc_data=direc_data, lr_sched=lr_sched, class_weight=class_weights, rotation_range=180, flip=True,