def _mini_batch(model, mini_batch, batch, is_train, apply_updates=None):
    global insp_
    ce = []
    # walk the micro-batches that make up one mini-batch
    for i in xrange(batch.mini / batch.micro):
        if not is_train:
            # evaluation path: the compiled model returns (cost, error) for micro-batch i
            output = model(mini_batch, i)
            ce.append([output[0], output[1]])
    return _avg(ce)
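These snippets rely on an `_avg` helper that is not shown. A minimal sketch of what the call sites imply (an element-wise mean over the collected [cost, error] pairs); the project's real implementation may differ:

import numpy as np

def _avg(values):
    # call sites append [cost, error] pairs, so this reduces a list of pairs to
    # one element-wise mean pair; a flat list of floats reduces to a single float
    return np.mean(np.asarray(values, dtype=float), axis=0).tolist()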
Example #3
def _batch(model, batch_size, batch, is_train=True, apply_updates=None):
    ce = []
    # split the loaded data into mini-batches and average their [cost, error] results
    for i in xrange(batch_size / batch.mini):
        ce.append(_mini_batch(model, i, batch, is_train, apply_updates))
    return _avg(ce)
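# Hypothetical usage sketch (names and sizes are illustrative, not from the
# original project): assume a config object with .mini and .micro attributes,
# e.g. batch.mini = 32 and batch.micro = 8. A call such as
#     _batch(train_model, 128, batch, is_train=False)
# then runs 128/32 = 4 mini-batches, each of which _mini_batch splits into
# 32/8 = 4 micro-batches, and returns the element-wise average of the
# resulting [cost, error] pairs via _avg.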
        time_start_iter = time()
        loader.next_train_batch(x_, y_, x_skeleton_)
        tr.batch_size = y_.get_value(borrow=True).shape[0]
        ce.append(_batch(train_model, tr.batch_size, batch, True, apply_updates))
       
        timing_report(i, time()-time_start_iter, tr.batch_size, res_dir)
        print "\t| "+ training_report(ce[-1]) + ", finish total of: 0." + str(i*1.0/loader.n_iter_train)
    # End of Epoch
    ####################################################################
    ####################################################################
    print "\n%s\n\t End of epoch %d, \n printing some debug info.\n%s" \
        %('-'*30, epoch, '-'*30)
    ####################################################################
    ####################################################################
    # print insp_
    train_ce.append(_avg(ce))
    # validate
    valid_ce.append(test_lio_skel(use, test_model, batch, drop, tr.rng, epoch, tr.batch_size, x_, y_, loader, x_skeleton_))

    # save best params
    # if valid_ce[-1][1] < 0.25:
    res_dir = save_results(train_ce, valid_ce, res_dir, params=params)
    if not tr.moved: res_dir = move_results(res_dir)

    if valid_ce[-1][1] < best_valid:
        save_params(params, res_dir, "best")
    save_params(params, res_dir)

    if valid_ce[-1][1] < best_valid:
        best_valid = valid_ce[-1][1]
        print('tr.batch_size_after=%d' % tr.batch_size)
        ce.append(
            _batch(train_model, tr.batch_size, batch, True, apply_updates)[0])
        print "the %d iteration,time used:%d" % (i, time() - time_start_iter)
        #timing_report(i, time()-time_start_iter, tr.batch_size, res_dir)
        print "\t| " + training_report(ce[-1]) + ", finish total of: 0." + str(
            i * 1.0 / loader.n_iter_train)
    # End of Epoch
    ####################################################################
    ####################################################################
    print "\n%s\n\t End of epoch %d, \n printing some debug info.\n%s" \
        %('-'*30, epoch, '-'*30)
    ####################################################################
    ####################################################################
    # print insp_
    train_ce.append(_avg(ce))
    # validate
    valid_ce.append(
        test_lio_skel(use, test_model, batch, drop, tr.rng, epoch,
                      tr.batch_size, x_, y_, loader, x_skeleton_))

    # save best params
    # if valid_ce[-1][1] < 0.25:
    res_dir = save_results(train_ce, valid_ce, res_dir, params=params)
    if not tr.moved: res_dir = move_results(res_dir)

    if valid_ce[-1][1] < best_valid:
        save_params(params, res_dir, "best")
    save_params(params, res_dir)

    if valid_ce[-1][1] < best_valid:
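The epoch tail in these examples repeats one idiom: validate, always save the current parameters, and additionally keep a "best" snapshot whenever the validation error improves. A condensed sketch of that pattern; the `checkpoint` wrapper is hypothetical, while `save_params` and the `valid_ce[-1][1]` convention are taken from the code above:

def checkpoint(valid_ce, best_valid, params, res_dir):
    # valid_ce[-1][1] holds the most recent validation error
    if valid_ce[-1][1] < best_valid:
        best_valid = valid_ce[-1][1]
        save_params(params, res_dir, "best")   # snapshot of the best model so far
    save_params(params, res_dir)               # always keep the latest parameters
    return best_valid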
Example #6
        #print out_mean_train, out_std_train
        ce.append(ce_temp)
        out_mean_train.append(out_mean_temp)
        out_std_train.append(out_std_temp)

        print "Training: No.%d iter of Total %d, %d s"% (i,loader.n_iter_train, time()-time_start_iter)  \
                + "\t| negative_log_likelihood "+ training_report(ce[-1]) 
    # End of Epoch
    ####################################################################
    ####################################################################
    print "\n%s\n\t End of epoch %d, \n printing some debug info.\n%s" \
        %('-'*30, epoch, '-'*30)
    ####################################################################
    ####################################################################
    print ce
    train_ce.append(_avg(ce))
    out_mean_all.append(_avg(out_mean_train))
    out_std_all.append(_avg(out_std_train))
    # validate
    valid_ce.append(test_lio_skel(use, test_model, batch, drop, tr.rng, epoch, tr.batch_size, x_, y_, loader, x_skeleton_))

    # save best params
    res_dir = save_results(train_ce, valid_ce, res_dir, params=params, out_mean_train=out_mean_all,out_std_train=out_std_all)
    if not tr.moved: res_dir = move_results(res_dir)

    if valid_ce[-1][1] < best_valid:
        save_params(params, res_dir, "best")
    save_params(params, res_dir)

    if valid_ce[-1][1] < best_valid:
        best_valid = valid_ce[-1][1]
        #load data
        time_start_iter = time()
        loader.next_train_batch(x_, y_, x_skeleton_)
        ce_temp, out_mean_temp, out_std_temp = _batch(train_model, tr.batch_size, batch, True, apply_updates)
        ce.append(ce_temp)
        out_mean_train.append(out_mean_temp)
        out_std_train.append(out_std_temp)

        print "Training: No.%d iter of Total %d, %d s"% (i,loader.n_iter_train, time()-time_start_iter)  \
                + "\t| negative_log_likelihood "+ training_report(ce[-1]) 
    # End of Epoch
    ####################################################################
    print "\n%s\n\t End of epoch %d, \n printing some debug info.\n%s" \
        %('-'*30, epoch, '-'*30)

    train_ce.append(_avg(ce))
    out_mean_all.append(_avg(out_mean_train))
    out_std_all.append(_avg(out_std_train))
    # validate
    valid_ce.append(test_lio_skel(use, test_model, batch, drop, tr.rng, epoch, tr.batch_size, x_, y_, loader, x_skeleton_))

    # save best params
    res_dir = save_results(train_ce, valid_ce, res_dir, params=net_convnet3d_grbm_early_fusion.params, out_mean_train=out_mean_all,out_std_train=out_std_all)
    if not tr.moved: res_dir = move_results(res_dir)

    if valid_ce[-1][1] < best_valid:
        save_params(net_convnet3d_grbm_early_fusion.params, res_dir, "best")
    save_params(net_convnet3d_grbm_early_fusion.params, res_dir)

    if valid_ce[-1][1] < best_valid:
        best_valid = valid_ce[-1][1]
def _batch(model, batch_size, batch, is_train=True, apply_updates=None):
    ce = []
    for i in xrange(batch_size / batch.mini):
        ce.append(_mini_batch(model, i, batch, is_train, apply_updates))
    return _avg(ce)
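Putting the fragments together, the examples imply an epoch driver of roughly this shape. The loop scaffolding and `n_epochs` are assumptions for illustration; `loader`, `tr`, `_batch`, `_avg`, and `test_lio_skel` are the names used in the snippets themselves:

for epoch in xrange(n_epochs):
    ce = []
    for i in xrange(loader.n_iter_train):
        loader.next_train_batch(x_, y_, x_skeleton_)    # load the next training chunk
        ce.append(_batch(train_model, tr.batch_size,
                         batch, True, apply_updates))   # train and record [cost, error]
    train_ce.append(_avg(ce))                           # epoch-level training average
    valid_ce.append(test_lio_skel(use, test_model, batch, drop, tr.rng, epoch,
                                  tr.batch_size, x_, y_, loader, x_skeleton_))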