Example #1
    def __init__(self, planner_type):
        self.type = planner_type
        # hyperparameters for the EM-trained transition model loaded from best_models/em_models/
        model_params = {}
        model_params['lipschitz_constant'] = 0.25
        model_params['num_hidden_layers'] = 1
        model_params['hidden_layer_nodes'] = 32
        model_params['activation_fn'] = 'relu'
        model_params['learning_rate'] = 0.001
        model_params['observation_size'] = 4
        model_params['num_models'] = 16
        model_params['num_epochs'] = 5
        model_params['num_samples'] = 2000
        load = True
        run_ID = 0
        gaussian_variance = 0.05
        fname = 'best_models/em_models/model-'+str(run_ID)+"-"+str(model_params['num_samples'])+"-"+str(model_params['learning_rate'])+\
            "-"+str(gaussian_variance)+"-"+str(model_params['num_hidden_layers'])+"-"+str(model_params['lipschitz_constant'])
        self.em_model_object = transition_model.neural_transition_model(
            model_params, load, fname)

        # hyperparameters for the deterministic models loaded from best_models/deterministic_models/
        model_params = {}
        model_params["observation_size"] = 6
        model_params["num_hidden_layers"] = 2
        model_params["hidden_layer_nodes"] = 32
        model_params["activation_fn"] = 'relu'
        model_params["learning_rate"] = 0.005
        fname = 'best_models/deterministic_models/'
        self.other_models_object = other_models.neural_other_model(
            model_params, True, fname)
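The checkpoint path encodes the run ID and the key hyperparameters. A small helper along these lines (hypothetical, not part of the Lip repo) reproduces the same naming scheme and keeps the pattern in one place:

def em_model_fname(base_dir, run_id, params, gaussian_variance):
    # rebuild the 'model-<run>-<samples>-<lr>-<variance>-<layers>-<lipschitz>' name used above
    parts = [run_id,
             params['num_samples'],
             params['learning_rate'],
             gaussian_variance,
             params['num_hidden_layers'],
             params['lipschitz_constant']]
    return base_dir + 'model-' + '-'.join(str(p) for p in parts)

# e.g. em_model_fname('best_models/em_models/', 0, model_params, 0.05)
# gives 'best_models/em_models/model-0-2000-0.001-0.05-1-0.25'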
Example #2
File: main.py  Project: kavosh8/Lip
# imports this excerpt relies on
import random
import sys

import numpy
import tensorflow as tf

import em
import transition_model
import utils

# run_number, model_params, and em_params are assumed to be defined earlier in main.py
numpy.random.seed(run_number)
random.seed(run_number)
session_conf = tf.ConfigProto(intra_op_parallelism_threads=1,
                              inter_op_parallelism_threads=1)
from keras import backend as K
tf.set_random_seed(run_number)
sess = tf.Session(graph=tf.get_default_graph(), config=session_conf)
K.set_session(sess)
# make sure results are reproducible

li_w, li_em_obj = [], []
#build training data
li_samples, li_labels = utils.load_synthetic_data(model_params['num_samples'])
phi, y = utils.create_matrices(li_samples, li_labels, model_params)
#create transition model
tm = transition_model.neural_transition_model(model_params)
#create em object
em_object = em.em_learner(em_params)
for iteration in range(em_params['num_iterations']):
    li_em_obj.append(em_object.e_step_m_step(tm, phi, y,
                                             iteration))  # do one EM iteration
    li_w.append(utils.compute_exact_wass_loss(tm, em_object))
    print(iteration, "li_em_obj:", li_em_obj[-1], "li_w",
          li_w[-1])  #print EM objective
    sys.stdout.flush()
    numpy.savetxt("log/w_loss-"+str(run_number)+"-"+\
        str(model_params['num_samples'])+"-"+str(model_params['learning_rate'])+\
        "-"+str(em_params['gaussian_variance'])+"-"+str(model_params['num_hidden_layers'])+"-"+str(model_params['lipschitz_constant'])+".txt",li_w)
    numpy.savetxt("log/em_loss-"+str(run_number)+"-"+\
        str(model_params['num_samples'])+"-"+str(model_params['learning_rate'])+\
        "-"+str(em_params['gaussian_variance'])+"-"+str(model_params['num_hidden_layers'])+"-"+str(model_params['lipschitz_constant'])+".txt",li_em_obj)
Example #3
# imports this excerpt relies on
import numpy
import matplotlib.pyplot as plt

import transition_model

# hyperparameters of the saved transition model to be loaded
model_params = {}
model_params['lipschitz_constant'] = 0.25
model_params['num_hidden_layers'] = 1
model_params['hidden_layer_nodes'] = 32
model_params['activation_fn'] = 'relu'
model_params['learning_rate'] = 0.001
model_params['observation_size'] = 4
model_params['num_models'] = 16
model_params['num_epochs'] = 5
model_params['num_samples'] = 3000
gaussian_variance = 0.05
run_ID = 4

fname = 'best_models/model-'+str(run_ID)+"-"+str(model_params['num_samples'])+"-"+str(model_params['learning_rate'])+\
    "-"+str(gaussian_variance)+"-"+str(model_params['num_hidden_layers'])+"-"+str(model_params['lipschitz_constant'])
tm = transition_model.neural_transition_model(model_params, True, fname)

for dimension_number in range(model_params['observation_size']):
    plt.subplot(2, 2, dimension_number + 1)  # 2x2 grid; matplotlib subplot indices start at 1
    all_li = []
    s_li = range(8)
    # one list of predictions per learned model
    for _ in range(model_params['num_models']):
        all_li.append([])
    for s in s_li:
        # vary only the current dimension; hold the other state dimensions fixed at 3
        temp = [3, 3, 3, 3]
        temp[dimension_number] = s
        x = numpy.array(temp).reshape((1, 4))
        x_prime = tm.predict(x)
        for z, xp in enumerate(x_prime):
            all_li[z].append(xp[0][dimension_number])
    for y in all_li:
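The listing is cut off inside this final loop; presumably each model's trace is then drawn into the current subplot. A self-contained sketch of that kind of plot, with toy numbers standing in for all_li (the data, labels, and plt.show() call are illustrative assumptions, not taken from the Lip project):

import matplotlib.pyplot as plt

s_li = list(range(8))
# toy predictions from 3 hypothetical models over the swept values
all_li = [[0.1 * m + 0.5 * s for s in s_li] for m in range(3)]

for y in all_li:
    plt.plot(s_li, y)  # one curve per model
plt.xlabel('value of the swept observation dimension')
plt.ylabel('predicted next-state value')
plt.show()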