Example #1
    # Alternative checkpoint paths, kept for reference:
    # path_to_save_variables = home + '/Documents/tmp/pytorch_bvae' + str(i) + '.pt'
    # path_to_load_variables = home + '/Documents/tmp/pytorch_bvae' + str(i) + '.pt'
    # path_to_save_variables = ''
    # path_to_save_variables = this_dir + '/params_' + args.model + '_'

    # Loading directly via state_dict, mapping GPU storages to CPU:
    # model.load_state_dict(torch.load(path_to_save_variables, lambda storage, loc: storage))
    # print('loaded variables ' + path_to_save_variables)

    # Move the model to the GPU when one is available.
    if torch.cuda.is_available():
        model.cuda()

    # Load the epoch-3000 checkpoint (args.epoch could select it instead).
    this_ckt_file = path_to_save_variables + str(3000) + '.pt'
    # this_ckt_file = path_to_save_variables + str(args.epoch) + '.pt'
    model.load_params(path_to_load_variables=this_ckt_file)

    # Indices of the samples to plot; [6, 3, 2, 5] was used for the standard
    # model. Other candidate sets, kept for reference:
    # ffg_samps = [0, 1, 2, 3]
    # ffg_samps = [5, 6, 7, 8]
    # ffg_samps = [5]
    # ffg_samps = [6]
    ffg_samps = [6, 3, 2, 5]

    rows = 4
    cols = len(ffg_samps) + 1  # extra column for the annotation

    legend = False

    fig = plt.figure(figsize=(3 + cols, 1 + rows), facecolor='white')
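A minimal sketch of what load_params presumably does here, pieced together from
the commented-out load_state_dict call above; the body below is an assumption,
not this codebase's actual implementation:

import torch

def load_params(model, path_to_load_variables):
    # Map all storages to CPU so a GPU-trained checkpoint loads anywhere.
    state = torch.load(path_to_load_variables,
                       map_location=lambda storage, loc: storage)
    model.load_state_dict(state)
    print('loaded variables ' + path_to_load_variables)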
Example #2
# (truncated: the tail of an alternative flow-model hyper_config, kept for reference)
#                     'rv_arch': [[x_size+z_size,200],[200,200],[200,z_size*2]],
#                     'flow_hidden_size': 100
#                 }
# model = VAE(hyper_config)

# else:
#     print('What')
#     fadas  # undefined name: crashes here on an unrecognized model


trained_model = VAE(hyper_config)
# model.load_params(home+'/Documents/tmp/first_try/'+args.model+'/params_'+args.model+'_2800.pt')
trained_model.load_params(home+'/Documents/tmp/new_training/standard/params_standard_1000.pt')


# Now reset the encoder, or even load a new one with a different architecture.
# Could init a new encoder and have the model ignore the old one.
# Another option: init the right model, then load only the params you want.
# Don't init q.
# How does it know which params are the decoder ones, besides shape?
#   Ah, they have key names! Mine are mostly numbers.
    
# Inspect the checkpoint's structure before deciding what to load:
# print(model.state_dict())
# print(model.state_dict().keys())
# for k, v in model.state_dict().items():
#     print(k)
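A minimal sketch of the partial-load idea discussed above: filter the saved
state_dict by key name and load only the decoder parameters, leaving the freshly
initialized encoder at its init. The 'decoder' prefix is an assumption; the
actual names depend on how the submodules were registered (and, as the notes
say, mostly-numeric keys make this harder):

import torch

def load_decoder_only(model, ckpt_path, prefix='decoder'):
    # Keep only the entries whose key names mark them as decoder parameters.
    saved = torch.load(ckpt_path, map_location=lambda storage, loc: storage)
    decoder_state = {k: v for k, v in saved.items() if k.startswith(prefix)}
    # strict=False tolerates the missing encoder keys.
    model.load_state_dict(decoder_state, strict=False)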
Example #3
# (truncated: the tail of an alternative flow-model hyper_config, kept for reference)
#                     'n_flows': 2,
#                     'qv_arch': [[x_size,200],[200,200],[200,z_size*2]],
#                     'qz_arch': [[x_size+z_size,200],[200,200],[200,z_size*2]],
#                     'rv_arch': [[x_size+z_size,200],[200,200],[200,z_size*2]],
#                     'flow_hidden_size': 100
#                 }
# model = VAE(hyper_config)

else:
    # The matching if/elif branches for the other models were elided above.
    raise ValueError('unknown model: ' + args.model)

model = VAE(hyper_config)
print('done init')
# Load the epoch-1 checkpoint for this model before training.
model.load_params(home + '/Documents/tmp/large_N_time/' + args.model +
                  '/params_' + args.model + '_1.pt')

# Train params
learning_rate = .001
batch_size = 100
k = 1
epochs = 3000

# Save params and compute IW and AIS
start_at = 100
save_freq = 300
display_epoch = 10

# Test params
k_IW = 2000
batch_size_IW = 20
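A minimal sketch of the importance-weighted (IW) log-likelihood estimate that
the k_IW and batch_size_IW settings above presumably feed into; the function
below illustrates the standard IWAE-style estimator, not this codebase's API:

import math
import torch

def iw_log_likelihood(log_w):
    # log_w: [batch, k] tensor of log importance weights,
    # log w = log p(x, z) - log q(z | x) for k samples z ~ q(z | x).
    # IW estimate: log p(x) ~= logsumexp over the k samples, minus log k.
    k = log_w.size(1)
    return torch.logsumexp(log_w, dim=1) - math.log(k)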


