Example 1
0
def run_sop(hyper, results_path):
    """Train an SOP model with the given hyperparameters, logging under results_path."""
    tf.random.set_seed(seed=hyper['seed'])
    train_dataset, test_dataset = load_mnist_sop_data(batch_n=hyper['batch_size'])

    sop_optimizer = setup_sop_optimizer(hyper=hyper)
    model_type = sop_optimizer.model.model_type

    # Timestamp the log file name so repeated runs do not overwrite each other.
    log_file = append_timestamp_to_file(
        file_name=results_path + f'/loss_{model_type}.log', termination='.log')
    logger = setup_logger(log_file_name=log_file,
                          logger_name=model_type + str(hyper['seed']))
    log_all_hyperparameters(hyper=hyper, logger=logger)
    save_hyper(hyper)

    train_sop(sop_optimizer=sop_optimizer,
              hyper=hyper,
              train_dataset=train_dataset,
              test_dataset=test_dataset,
              logger=logger)
Example 2
0
def test_in_mnist_sample():
    """Run a short SOP training pass over a small MNIST sample for each model type."""
    batch_n, epochs = 4, 3
    width, height, rgb = 14, 28, 1
    hyper = {'width_height': (width, height, rgb),
             'model_type': '',
             'units_per_layer': 240,
             'batch_size': batch_n,
             'learning_rate': 0.0003,
             'weight_decay': 1.e-3,
             'epochs': epochs,
             'iter_per_epoch': 10,
             'test_sample_size': 1,
             'temp': tf.constant(0.1)}
    results_path = './Log/'
    data = load_mnist_sop_data(batch_n=batch_n, run_with_sample=True)
    # models = ['GS', 'IGR_I']
    for model_name in ['GS', 'IGR_I', 'IGR_Planar']:
        hyper['model_type'] = model_name
        run_sop(hyper=hyper, results_path=results_path, data=data)
Example 3
0
File: TestSOP.py Project: yyht/igr
 def test_in_mnist_sample(self):
     """Smoke-test SOP training on a small MNIST sample with GS and IGR models."""
     batch_n = 6
     epochs = 5
     width = 14
     height = 28
     hyper = {
         'width_height': (width, height, 1),
         'model_type': 'GS',
         'batch_size': batch_n,
         'learning_rate': 0.001,
         'epochs': epochs,
         'iter_per_epoch': 10,
         'temp': tf.constant(0.1)
     }
     results_path = './Log/'
     data = load_mnist_sop_data(batch_n=batch_n,
                                epochs=epochs,
                                run_with_sample=True)
     # run_sop consumes the (train, test) dataset pair directly; the
     # previous unpacking into unused locals was dead code and is removed.
     run_sop(hyper=hyper, results_path=results_path, data=data)
     hyper['model_type'] = 'IGR'
     run_sop(hyper=hyper, results_path=results_path, data=data)
Example 4
0
# Evaluate a trained SOP model: restore its hyperparameters and weights,
# then compute the mean loss over the test set.
select_case = 2
run_with_sample = False
# samples_n = 1 * int(1.e3)
samples_n = 1 * int(1.e3)

hyper_file, weights_file = 'hyper.pkl', 'w.h5'
# NOTE(review): `models` and `path_to_trained_models` are defined earlier in
# the file (not visible here); `select_case` picks one trained run.
model_type = models[select_case]['model_type']
path_to_trained_models += models[select_case]['model_dir'] + '/'

# Restore the hyperparameters the model was trained with.
with open(file=path_to_trained_models + hyper_file, mode='rb') as f:
    hyper = pickle.load(f)

batch_n = hyper['batch_size']
hyper['test_sample_size'] = samples_n  # override test-time sample count
tf.random.set_seed(seed=hyper['seed'])
data = load_mnist_sop_data(batch_n=hyper['batch_size'],
                           run_with_sample=run_with_sample)
train_dataset, test_dataset = data
epoch = hyper['epochs']
sop_optimizer = setup_sop_optimizer(hyper=hyper)
# Take one training batch and keep its first 14 rows (presumably the upper
# half of a 28-row image that the SOP model completes — TODO confirm).
for x in train_dataset:
    x_upper = x[:, :14, :, :]
    break
sop_optimizer.batch_n = batch_n
# One forward pass builds the model's variables so that load_weights below
# has existing layers to restore into; the output itself is unused.
aux = sop_optimizer.model.call(x_upper)
sop_optimizer.model.load_weights(filepath=path_to_trained_models +
                                 weights_file)

# Accumulate the average per-batch loss over the whole test set.
test_loss_mean = tf.keras.metrics.Mean()
for x in test_dataset:
    loss = evaluate_loss_on_batch(x, sop_optimizer, hyper)
    test_loss_mean(loss)
Example 5
0
File: viz_sop.py Project: yyht/igr
from Utils.load_data import load_mnist_sop_data
from Models.SOP import SOP
from Models.SOPOptimizer import viz_reconstruction
import tensorflow as tf

# Visualize reconstructions from a trained GS SOP model.
model_type = 'GS'
hyper = {
    'width_height': (14, 28, 1),
    'model_type': model_type,
    'batch_size': 64,
    'learning_rate': 0.0003,
    'epochs': 100,
    'iter_per_epoch': 937,
    'temp': tf.constant(0.67)
}
data = load_mnist_sop_data(batch_n=hyper['batch_size'], epochs=hyper['epochs'])
train, test = data
model = SOP(hyper=hyper)
results_file = './Log/model_weights_GS.h5'
# Build the model from the hyperparameters so load_weights has layers to
# restore into. (A hard-coded (64, 14, 28, 1) overwrite of this shape was
# removed — it duplicated the value derived from `hyper`.)
shape = (hyper['batch_size'], ) + hyper['width_height']
model.build(input_shape=shape)
model.load_weights(filepath=results_file)
# Keep the last of the first 10 test batches; initialize first so `images`
# is always defined, matching the companion visualization script.
images = 0
for x_test in test.take(10):
    images = x_test

viz_reconstruction(test_image=images, model=model)
Example 6
0
from Utils.load_data import load_mnist_sop_data
from Models.SOP import SOP
from Models.SOPOptimizer import viz_reconstruction
import tensorflow as tf

# Reconstruction visualization for the trained GS variant of the SOP model.
model_type = 'GS'
hyper = {
    'width_height': (14, 28, 1),
    'model_type': model_type,
    'batch_size': 64,
    'learning_rate': 0.0003,
    'epochs': 100,
    'iter_per_epoch': 937,
    'temp': tf.constant(0.67)
}
train, test = load_mnist_sop_data(batch_n=hyper['batch_size'])
model = SOP(hyper=hyper)
results_file = './Log/model_weights_GS.h5'
# Build the network before restoring the checkpoint.
model.build(input_shape=(hyper['batch_size'], ) + hyper['width_height'])
model.load_weights(filepath=results_file)
# Grab the last of the first 10 test batches to visualize.
images = 0
for x_test in test.take(10):
    images = x_test

viz_reconstruction(test_image=images, model=model)