Exemple #1
0
def start_all_logging_instruments(hyper, results_path, test_images):
    """Create the TF summary writer and a timestamped loss logger, record
    every hyperparameter, and plot the original test images.

    Returns:
        (writer, logger) tuple.
    """
    writer = tf.summary.create_file_writer(logdir=results_path)
    # Build the timestamped file name and logger name up front for clarity.
    log_file = append_timestamp_to_file(file_name=results_path + '/loss.log',
                                        termination='.log')
    logger_name = append_timestamp_to_file('logger', termination='')
    logger = setup_logger(log_file_name=log_file, logger_name=logger_name)
    log_all_hyperparameters(hyper=hyper, logger=logger)
    plot_originals(test_images=test_images, results_path=results_path)
    return writer, logger
Exemple #2
0
def start_all_logging_instruments(hyper, test_images):
    """Prepare the results directory and all logging for a run.

    Determines the results path from the model type and dataset name,
    ensures that directory exists, sets up a timestamped loss logger,
    records every hyperparameter, and plots the original test images.

    Args:
        hyper: hyperparameter dict; must contain 'model_type' and
            'dataset_name'.
        test_images: images forwarded to plot_originals.

    Returns:
        (logger, results_path) tuple.
    """
    results_path = determine_path_to_save_results(
        model_type=hyper['model_type'], dataset_name=hyper['dataset_name'])
    # makedirs(..., exist_ok=True) avoids the exists()-then-mkdir race and,
    # unlike os.mkdir, also creates any missing parent directories.
    os.makedirs(results_path, exist_ok=True)
    logger = setup_logger(log_file_name=append_timestamp_to_file(
        file_name=results_path + '/loss.log', termination='.log'),
                          logger_name=append_timestamp_to_file('logger',
                                                               termination=''))
    log_all_hyperparameters(hyper=hyper, logger=logger)
    plot_originals(test_images=test_images, results_path=results_path)
    return logger, results_path
Exemple #3
0
def run_sop(hyper, results_path, data):
    """Build the SOP optimizer and a per-model-type loss logger, log the
    hyperparameters, then train on the supplied (train, test) datasets."""
    train_split, test_split = data

    optimizer = setup_sop_optimizer(hyper=hyper)

    # The log file name embeds the model type so concurrent runs don't clash.
    log_file = append_timestamp_to_file(
        file_name=results_path + f'/loss_{optimizer.model.model_type}.log',
        termination='.log')
    logger = setup_logger(log_file_name=log_file)
    log_all_hyperparameters(hyper=hyper, logger=logger)
    train_sop(sop_optimizer=optimizer,
              hyper=hyper,
              train_dataset=train_split,
              test_dataset=test_split,
              logger=logger)
Exemple #4
0
def run_sop(hyper, results_path):
    """Seed TF, load MNIST SOP data, build the optimizer and a seeded
    logger, record and save the hyperparameters, then train."""
    tf.random.set_seed(seed=hyper['seed'])
    train_split, test_split = load_mnist_sop_data(batch_n=hyper['batch_size'])

    optimizer = setup_sop_optimizer(hyper=hyper)
    kind = optimizer.model.model_type

    # Logger name includes the seed so parallel seeded runs stay separate.
    logger = setup_logger(
        log_file_name=append_timestamp_to_file(
            file_name=results_path + f'/loss_{kind}.log',
            termination='.log'),
        logger_name=kind + str(hyper['seed']))
    log_all_hyperparameters(hyper=hyper, logger=logger)
    save_hyper(hyper)
    train_sop(sop_optimizer=optimizer,
              hyper=hyper,
              train_dataset=train_split,
              test_dataset=test_split,
              logger=logger)
Exemple #5
0
import time
import numpy as np
import tensorflow as tf
from Utils.Distributions import compute_gradients, apply_gradients
from Utils.general import initialize_mu_and_xi_for_logistic, initialize_mu_and_xi_equally, setup_logger

# Module-level logger shared by the definitions in this module.
# NOTE(review): assumes ./Log exists or that setup_logger creates it — confirm.
logger = setup_logger(log_file_name='./Log/discrete.log')


class MinimizeEmpiricalLoss:
    def __init__(self,
                 params,
                 learning_rate,
                 temp,
                 sample_size=int(1.e3),
                 max_iterations=int(1.e4),
                 run_kl=True,
                 tolerance=1.e-5,
                 model_type='IGR_I',
                 threshold=0.9,
                 planar_flow=None):

        self.params = params
        self.learning_rate = learning_rate
        self.temp = tf.constant(value=temp, dtype=tf.float32)
        self.sample_size = sample_size
        self.max_iterations = max_iterations
        self.run_kl = run_kl
        self.tolerance = tolerance
        self.model_type = model_type
        self.threshold = threshold