from time import time
from numpy import linspace, float32

# Project-level helpers (save_results, save_params, print_params, _batch,
# training_report, timing_report) and the loader / tr / lr objects are
# assumed to be in scope from the surrounding project.

lr_decay_epoch = 0
n_lr_decays = 0
train_ce, valid_ce = [], []
flag = True
global insp_
insp_ = None
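# Bookkeeping for the run: per-epoch cross-entropy histories for the training
# and validation sets, counters for learning-rate decay, and `insp_`, a
# module-level slot (presumably used to stash intermediate activations for
# inspection elsewhere in the project).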

res_dir = save_results(train_ce, valid_ce, res_dir, params=params)

save_params(params, res_dir)

lr.decay = 1

for epoch in xrange(tr.n_epochs):
    ce = []
    print_params(params)
    ####################################################################
    print "\n%s\n\t epoch %d \n%s" % ('-' * 30, epoch, '-' * 30)
    ####################################################################
    time_start = time()
    for i in range(loader.n_iter_train):     
        #load data
        time_start_iter = time()
        loader.next_train_batch(x_, y_, x_skeleton_)
        tr.batch_size = y_.get_value(borrow=True).shape[0]
        ce.append(_batch(train_model, tr.batch_size, batch, True, apply_updates))
       
        timing_report(i, time()-time_start_iter, tr.batch_size, res_dir)
        print "\t| "+ training_report(ce[-1]) + ", finish total of: 0." + str(i*1.0/loader.n_iter_train)
save_params(net_convnet3d_grbm_early_fusion.params, res_dir)
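# `_batch` is defined elsewhere in this project; a rough statement of the
# contract these loops assume (an inference from the call sites, not the
# actual implementation): it runs the compiled Theano function over the
# current batch, applies the parameter updates when training, and returns
# the batch cost -- a scalar above, and a (cost, output_mean, output_std)
# triple in the loop below.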

# default learning rate
lr.start = 0.0001
lr.stop = 0.00001
  
# Wudi made this to explicitly control the learning rate
learning_rate_map = linspace(lr.start, lr.stop, tr.n_epochs)
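# For illustration, assuming tr.n_epochs = 10, this schedule is:
#   [1e-04, 9e-05, 8e-05, 7e-05, 6e-05, 5e-05, 4e-05, 3e-05, 2e-05, 1e-05]
# i.e. the learning rate falls linearly by 1e-05 per epoch from lr.start
# down to lr.stop.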

for epoch in xrange(tr.n_epochs):
    learning_rate.set_value(float32(learning_rate_map[epoch]))
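    # The float32 cast above is needed because linspace yields float64 values,
    # while `learning_rate` is a Theano shared variable (presumably float32,
    # matching the cast): set_value raises a TypeError rather than silently
    # downcasting a float64 input.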
    ce = []
    out_mean_train = []
    out_std_train = []
    print_params(net_convnet3d_grbm_early_fusion.params) 
    ####################################################################
    print "\n%s\n\t epoch %d \n%s"%('-'*30, epoch, '-'*30)
    time_start = time()
    for i in range(loader.n_iter_train):     
        #load data
        time_start_iter = time()
        loader.next_train_batch(x_, y_, x_skeleton_)
        ce_temp, out_mean_temp, out_std_temp = _batch(train_model, tr.batch_size, batch, True, apply_updates)
        ce.append(ce_temp)
        out_mean_train.append(out_mean_temp)
        out_std_train.append(out_std_temp)
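        # out_mean / out_std are (presumably) per-minibatch statistics of the
        # network's outputs, collected here so output drift can be monitored
        # over the epoch.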

        print "Training: No.%d iter of Total %d, %d s"% (i,loader.n_iter_train, time()-time_start_iter)  \
                + "\t| negative_log_likelihood "+ training_report(ce[-1]) 
    # End of Epoch