####################################################################
#################################################################### 
# --- Training-loop setup (fragment A; the inner loop is truncated by the scrape) ---
time_start = 0                   # wall-clock timestamp; overwritten per epoch below
best_valid = inf                 # best validation error seen so far; `inf` presumably numpy's — TODO confirm import
# main loop
# ------------------------------------------------------------------------------
lr_decay_epoch = 0               # epoch at which the learning rate was last decayed
n_lr_decays = 0                  # number of learning-rate decays performed so far
train_ce, valid_ce = [], []      # per-epoch cross-entropy histories (train / validation)
flag=True
# NOTE(review): `global` at module level is a no-op; this fragment was likely
# extracted from inside a function where the declaration had effect.
global insp_
insp_ = None                     # placeholder for inspection/debug outputs collected during training

# Persist the (still empty) histories up front; save_results returns the results directory.
res_dir = save_results(train_ce, valid_ce, res_dir, params=params)

# Snapshot the initial parameters into the results directory.
save_params(params, res_dir)

lr.decay = 1                     # learning-rate decay factor (1 == no decay initially)

# One pass per epoch over the training set.
for epoch in xrange(tr.n_epochs):
    ce = []                      # cross-entropy values collected over this epoch's iterations
    print_params(params) 
    ####################################################################
    ####################################################################
    print "\n%s\n\t epoch %d \n%s"%('-'*30, epoch, '-'*30)
    ####################################################################
    ####################################################################
    time_start = time()
    # Iterate over all training mini-batches of this epoch.
    for i in range(loader.n_iter_train):     
        #load data
        time_start_iter = time()
# NOTE(review): scrape is truncated here — the statement below is dedented out
# of the loop and builds the Theano train/test functions; it presumably belongs
# to a different snippet. TODO confirm against the original file.
apply_updates, train_model, test_model = net_convnet3d_grbm_early_fusion.build_finetune_functions(x_, y_int32, x_skeleton_,learning_rate)


# --- Training-loop setup (fragment B: explicit linear learning-rate schedule;
# --- the epoch-loop body is truncated by the scrape) ---
######################################################################
print "\n%s\n\ttraining\n%s"%(('-'*30,)*2)

time_start = 0                        # wall-clock timestamp; reset per epoch in the loop
best_valid = inf                      # best validation error so far; `inf` presumably numpy's — TODO confirm
lr_decay_epoch = 0                    # epoch of the last learning-rate decay
n_lr_decays = 0                       # number of learning-rate decays performed
train_ce, valid_ce = [], []           # per-epoch cross-entropy histories (train / validation)
out_mean_all, out_std_all = [], []    # per-epoch network-output mean/std statistics

# Persist the (still empty) histories up front; save_results returns the results directory.
res_dir = save_results(train_ce, valid_ce, res_dir, params=net_convnet3d_grbm_early_fusion.params)

# Snapshot the initial parameters into the results directory.
save_params(net_convnet3d_grbm_early_fusion.params, res_dir)

# default learning rate
lr.start = 0.0001
lr.stop = 0.00001
  
# Explicit linear schedule from lr.start down to lr.stop, one value per epoch,
# so the learning rate is controlled directly rather than via decay events.
learning_rate_map = linspace(lr.start, lr.stop, tr.n_epochs)

for epoch in xrange(tr.n_epochs):
    # Set this epoch's rate from the precomputed schedule
    # (shared variable, presumably Theano — TODO confirm).
    learning_rate.set_value(float32(learning_rate_map[epoch]))
    ce = []                 # cross-entropy values for this epoch
    out_mean_train = []     # per-iteration output means
    out_std_train = []      # per-iteration output standard deviations
    print_params(net_convnet3d_grbm_early_fusion.params) 
    ####################################################################
####################################################################
####################################################################
# --- Training-loop setup (duplicate of fragment A; inner loop truncated) ---
time_start = 0                   # wall-clock timestamp; overwritten per epoch below
best_valid = inf                 # best validation error seen so far; `inf` presumably numpy's — TODO confirm import
# main loop
# ------------------------------------------------------------------------------
lr_decay_epoch = 0               # epoch at which the learning rate was last decayed
n_lr_decays = 0                  # number of learning-rate decays performed so far
train_ce, valid_ce = [], []      # per-epoch cross-entropy histories (train / validation)
flag = True
# NOTE(review): `global` at module level is a no-op; this fragment was likely
# extracted from inside a function where the declaration had effect.
global insp_
insp_ = None                     # placeholder for inspection/debug outputs collected during training

# Persist the (still empty) histories up front; save_results returns the results directory.
res_dir = save_results(train_ce, valid_ce, res_dir, params=params)

# Snapshot the initial parameters into the results directory.
save_params(params, res_dir)

lr.decay = 1                     # learning-rate decay factor (1 == no decay initially)

# One pass per epoch over the training set.
for epoch in xrange(tr.n_epochs):
    ce = []                      # cross-entropy values collected over this epoch's iterations
    print_params(params)
    ####################################################################
    ####################################################################
    print "\n%s\n\t epoch %d \n%s" % ('-' * 30, epoch, '-' * 30)
    ####################################################################
    ####################################################################
    time_start = time()
    # Iterate over all training mini-batches of this epoch.
    for i in range(loader.n_iter_train):
        #load data
        # NOTE(review): scrape truncates the loop body here.
        time_start_iter = time()
# Ejemplo n.º 4 ("Example no. 4") — scrape artifact from the code-example site
# separating two snippets; commented out so it no longer breaks parsing.
# (A stray "0" vote/score line followed it and is folded into this comment.)
# --- Training-loop setup (duplicate of fragment B, PEP8-formatted;
# --- the epoch loop runs past the end of this excerpt) ---
######################################################################
print "\n%s\n\ttraining\n%s" % (('-' * 30, ) * 2)

time_start = 0                        # wall-clock timestamp; reset per epoch in the loop
best_valid = inf                      # best validation error so far; `inf` presumably numpy's — TODO confirm
lr_decay_epoch = 0                    # epoch of the last learning-rate decay
n_lr_decays = 0                       # number of learning-rate decays performed
train_ce, valid_ce = [], []           # per-epoch cross-entropy histories (train / validation)
out_mean_all, out_std_all = [], []    # per-epoch network-output mean/std statistics

# Persist the (still empty) histories up front; save_results returns the results directory.
res_dir = save_results(train_ce,
                       valid_ce,
                       res_dir,
                       params=net_convnet3d_grbm_early_fusion.params)

# Snapshot the initial parameters into the results directory.
save_params(net_convnet3d_grbm_early_fusion.params, res_dir)

# default learning rate
lr.start = 0.0001
lr.stop = 0.00001

# Explicit linear schedule from lr.start down to lr.stop, one value per epoch,
# so the learning rate is controlled directly rather than via decay events.
learning_rate_map = linspace(lr.start, lr.stop, tr.n_epochs)

for epoch in xrange(tr.n_epochs):
    # Set this epoch's rate from the precomputed schedule
    # (shared variable, presumably Theano — TODO confirm).
    learning_rate.set_value(float32(learning_rate_map[epoch]))
    ce = []                 # cross-entropy values for this epoch
    out_mean_train = []     # per-iteration output means
    out_std_train = []      # per-iteration output standard deviations
    print_params(net_convnet3d_grbm_early_fusion.params)
    ####################################################################