Esempio n. 1
0
def test_data(sess, X, Y, index_list, S_list, R_L_list, F_list, e, pre_test,
              n_batches):
    """Run one evaluation pass over ``n_batches`` minibatches and return the
    count-weighted mean loss.

    Relies on module-level globals not visible in this chunk: ``params``,
    ``batch_size``, ``tracker`` (the TF model) and the helper modules
    ``ut``/``dut`` — verify they are defined at module scope.

    Args:
        sess: TensorFlow session used for ``sess.run``.
        X, Y: measurement / ground-truth containers consumed by the batch
            helper.
        index_list, S_list, R_L_list, F_list: per-sample bookkeeping passed
            straight through to ``dut.prepare_kfl_QRFf_batch``.
        e: current epoch number (used only in the log message).
        pre_test: string prefix for the log message.
        n_batches: number of minibatches to evaluate.

    Returns:
        Total loss normalised by the total sample count.
        NOTE(review): raises ZeroDivisionError if every batch reports
        ``n_count == 0`` — confirm whether that can happen.
    """
    # Fresh zero recurrent states for the F and K LSTM stacks; the ``_t``
    # lists hold states coming out of a run, the ``_pre`` lists are fed in.
    LStateList_F_t = ut.get_zero_state(params)
    LStateList_F_pre = ut.get_zero_state(params)
    LStateList_K_t = ut.get_zero_state(params, t='K')
    LStateList_K_pre = ut.get_zero_state(params, t='K')
    # One reset counter per batch lane, incremented every minibatch.
    state_reset_counter_lst = [0 for i in range(batch_size)]
    total_loss = 0.0
    total_n_count = 0.0
    for minibatch_index in xrange(n_batches):
        state_reset_counter_lst = [s + 1 for s in state_reset_counter_lst]
        # The helper decides (per lane) whether to keep or reset the fed
        # states, and returns the next (x, y, r, f) minibatch.
        (LStateList_F_pre,LStateList_K_pre,_,x,y,r,f,state_reset_counter_lst)=\
            dut.prepare_kfl_QRFf_batch(index_list, minibatch_index, batch_size,
                                       S_list, LStateList_F_t, LStateList_F_pre, LStateList_K_t, LStateList_K_pre,
                                       None, None, params, Y, X, R_L_list,F_list,state_reset_counter_lst)
        gt = y
        mes = x
        # print(r)
        # keep_prob = 1: dropout disabled at test time.
        feed = {
            tracker._z: mes,
            tracker.target_data: gt,
            tracker.repeat_data: r,
            tracker.initial_state: LStateList_F_pre,
            tracker.initial_state_Q_noise: LStateList_K_pre,
            tracker.output_keep_prob: 1
        }
        # feed = {tracker._z: mes, tracker.target_data: gt, tracker.initial_state: LStateList_F_pre
        #        , tracker._P_inp: P, tracker._I: I}
        LStateList_F_t,LStateList_K_t,final_output,y = \
            sess.run([tracker.final_state_F,tracker.final_state_K,
                      tracker.final_output,tracker.y], feed)

        # Flatten the LSTMStateTuple objects returned by the graph into a
        # flat [c, h, c, h, ...] list, the layout the feed helper expects.
        tmp_lst = []
        for item in LStateList_F_t:
            tmp_lst.append(item.c)
            tmp_lst.append(item.h)
        LStateList_F_t = tmp_lst

        tmp_lst = []
        for item in LStateList_K_t:
            tmp_lst.append(item.c)
            tmp_lst.append(item.h)
        LStateList_K_t = tmp_lst

        # print(y)
        # print(y.shape)
        # print(final_output.shape)
        # Undo training-time normalisation before measuring the error.
        if params["normalise_data"] == 3 or params["normalise_data"] == 2:
            final_output = ut.unNormalizeData(final_output, params["y_men"],
                                              params["y_std"])
            y = ut.unNormalizeData(y, params["y_men"], params["y_std"])
        # Weight each batch loss by its valid-sample count.
        test_loss, n_count = ut.get_loss(params, gt=y, est=final_output)
        total_loss += test_loss * n_count
        total_n_count += n_count
        # if (minibatch_index%show_every==0):
        #     print pre_test+" test batch loss: (%i / %i / %i)  %f"%(e,minibatch_index,n_train_batches,test_loss)
    total_loss = total_loss / total_n_count
    s = pre_test + ' Loss --> epoch %i | error %f' % (e, total_loss)
    ut.log_write(s, params)
    return total_loss
Esempio n. 2
0
def test_data(sess,params,X,Y,index_list,S_list,R_L_list,F_list,e, pre_test,n_batches):
    """Evaluate the model over ``n_batches`` minibatches and return the
    count-weighted mean loss of the filtered output.

    Also tracks (and logs) two auxiliary losses: the prediction-only output
    and the raw-measurement output.

    Relies on module-level globals not visible in this chunk: ``batch_size``,
    ``Model`` (the TF model) and the helper modules ``ut``/``th``/``np`` —
    verify they are defined at module scope.

    Returns:
        Count-weighted mean loss of ``final_output``.
        NOTE(review): raises ZeroDivisionError if every batch reports
        ``n_count == 0`` — confirm whether that can happen.
    """
    is_test=1
    # Recurrent-state dict keyed by state name; carried across minibatches.
    dic_state=ut.get_state_list(params)
    # One identity covariance matrix per batch lane, fed into the graph.
    I= np.asarray([np.diag([1.0]*params['n_output']) for i in range(params["batch_size"])],dtype=np.float32)
    params["reset_state"]=-1 #Never reset

    state_reset_counter_lst=[0 for i in range(batch_size)]
    total_loss=0.0
    total_pred_loss=0.0
    total_meas_loss=0.0
    total_n_count=0.0
    for minibatch_index in xrange(n_batches):
        state_reset_counter_lst=[s+1 for s in state_reset_counter_lst]
        # print state_reset_counter_lst
        (dic_state,x,y,r,f,_,state_reset_counter_lst,_)= \
            th.prepare_batch(is_test,index_list, minibatch_index, batch_size,
                                       S_list, dic_state, params, Y, X, R_L_list,F_list,state_reset_counter_lst)
        # is_training=0: disables dropout etc. in the feed.
        feed=th.get_feed(Model,params,r,x,y,I,dic_state, is_training=0)

        # Fetch filtered, predicted and measurement outputs in one run.
        states,final_output,final_pred_output,final_meas_output,y =sess.run([Model.states,Model.final_output,Model.final_pred_output,Model.final_meas_output,Model.y], feed)

        # Carry the recurrent state over to the next minibatch.
        for k in states.keys():
            dic_state[k] = states[k]

        # Undo training-time normalisation before measuring errors.  Note
        # that the measurement output is always un-normalised with the x
        # statistics, and mode 4 uses x statistics for everything.
        if params["normalise_data"]==3 or params["normalise_data"]==2:
            final_output=ut.unNormalizeData(final_output,params["y_men"],params["y_std"])
            final_pred_output=ut.unNormalizeData(final_pred_output,params["y_men"],params["y_std"])
            final_meas_output=ut.unNormalizeData(final_meas_output,params["x_men"],params["x_std"])
            y=ut.unNormalizeData(y,params["y_men"],params["y_std"])
        if params["normalise_data"]==4:
            final_output=ut.unNormalizeData(final_output,params["x_men"],params["x_std"])
            final_pred_output=ut.unNormalizeData(final_pred_output,params["x_men"],params["x_std"])
            final_meas_output=ut.unNormalizeData(final_meas_output,params["x_men"],params["x_std"])
            y=ut.unNormalizeData(y,params["x_men"],params["x_std"])

        # n_count is the same for all three calls (same gt and r mask).
        test_loss,n_count=ut.get_loss(params,gt=y,est=final_output,r=r)
        test_pred_loss,n_count=ut.get_loss(params,gt=y,est=final_pred_output,r=r)
        test_meas_loss,n_count=ut.get_loss(params,gt=y,est=final_meas_output,r=r)
        total_loss+=test_loss*n_count
        total_pred_loss+=test_pred_loss*n_count
        total_meas_loss+=test_meas_loss*n_count
        total_n_count+=n_count
        # if (minibatch_index%show_every==0):
        #     print pre_test+" test batch loss: (%i / %i / %i)  %f"%(e,minibatch_index,n_train_batches,test_loss)
    total_loss=total_loss/total_n_count
    total_pred_loss=total_pred_loss/total_n_count
    total_meas_loss=total_meas_loss/total_n_count
    s =pre_test+' Loss --> epoch %i | error %f, %f, %f'%(e,total_loss,total_pred_loss,total_meas_loss)
    ut.log_write(s,params)
    return total_loss
Esempio n. 3
0
def test_data(sess, params, X, Y, index_list, S_list, R_L_list, F_list, e,
              pre_test, n_batches):
    """Evaluate the model over ``n_batches`` minibatches, de-duplicate the
    collected per-frame outputs, keep only frames flagged ``r == 1``, and
    print median/mean errors per sequence (or overall).

    Relies on module-level globals not visible in this chunk:
    ``batch_size``, ``tracker`` (the TF model) and the helper modules
    ``ut``/``th``/``sh`` — verify they are defined at module scope.

    Returns:
        (total_loss, median_result, mean_result, full_final_output_lst,
         file_lst, full_noise_lst)
        NOTE(review): ``total_loss`` is initialised to 0.0 and never
        accumulated (the accumulation line is disabled upstream), so it is
        always 0.0 here — confirm whether callers rely on it.
    """
    # Recurrent-state dict keyed by state name; carried across minibatches.
    dic_state = ut.get_state_list(params)
    # One identity covariance matrix per batch lane, fed into the graph.
    I = np.asarray([
        np.diag([1.0] * params['n_output'])
        for i in range(params["batch_size"])
    ],
                   dtype=np.float32)
    dict_err = {}
    dict_name = {}
    # Unique sequence ids present in S_list.
    uniq_lst = [item for item in collections.Counter(S_list)]
    is_test = 1

    file_lst = []

    # Map each sequence id to a human-readable name (second-to-last path
    # component of its first file) and prepare an empty error bucket.
    for u in uniq_lst:
        idx = np.where(S_list == u)
        sname = F_list[idx][0][0][0].split('/')[-2]
        dict_name[u] = sname
        dict_err[u] = []

    state_reset_counter_lst = [0 for i in range(batch_size)]
    total_loss = 0.0
    total_n_count = 0.0
    full_curr_id_lst = []
    full_noise_lst = []
    full_r_lst = []
    full_y_lst = []
    full_final_output_lst = []
    for minibatch_index in xrange(n_batches):
        state_reset_counter_lst = [s + 1 for s in state_reset_counter_lst]
        (dic_state,x_sel,y,r,f,curr_sid,state_reset_counter_lst,curr_id_lst)= \
            th.prepare_batch(is_test,index_list, minibatch_index, batch_size,
                                       S_list, dic_state, params, Y, X, R_L_list,F_list,state_reset_counter_lst)
        feed = th.get_feed(tracker,
                           params,
                           r,
                           x_sel,
                           y,
                           I,
                           dic_state,
                           is_training=0)

        if params["model"] == "lstm":
            states, final_output, sel_y = sess.run(
                [tracker.states, tracker.final_output, tracker.y], feed)
            # BUGFIX: the lstm graph fetches no ``full_final_output``
            # tensor, so the unconditional reshape below used to raise
            # NameError on this path.  Reuse ``final_output`` instead
            # (assumes it covers the whole sequence — TODO confirm
            # against the lstm graph definition).
            full_final_output = final_output
        else:
            states,final_output,full_final_output,sel_y,x,qnoise_lst =\
                sess.run([tracker.states,tracker.final_output,tracker.full_final_output,tracker.y,tracker.x,tracker.qnoise_lst], feed)
        # Per-step outputs, one row per (lane, time-step).
        full_final_output = np.asarray(full_final_output).reshape(
            (batch_size, params['seq_length'], params['n_output']))
        # Carry the recurrent state over to the next minibatch.
        for k in states.keys():
            dic_state[k] = states[k]

        full_curr_id_lst.extend(curr_id_lst)
        full_r_lst.extend(r)
        file_lst.extend(f)
        full_final_output_lst.extend(full_final_output)
        full_y_lst.extend(y)

        # Q-noise estimates only exist on the non-lstm (Kalman) path.
        if params["model"] != "lstm":
            full_noise_lst.extend(qnoise_lst)

    # Drop duplicated frame ids (overlapping batches), then keep only the
    # frames flagged r == 1.
    index_lst = sh.get_nondublicate_lst(full_curr_id_lst)
    full_r_lst = np.asarray(full_r_lst)[index_lst]

    full_final_output_lst = np.asarray(full_final_output_lst)[index_lst]
    full_y_lst = np.asarray(full_y_lst)[index_lst]
    file_lst = np.asarray(file_lst)[index_lst]

    file_lst = file_lst[full_r_lst == 1]
    full_final_output_lst = full_final_output_lst[full_r_lst == 1]
    full_y_lst = full_y_lst[full_r_lst == 1]
    dict_err = {}

    # Undo training-time normalisation before measuring errors.
    if params["normalise_data"] == 3 or params["normalise_data"] == 2:
        full_final_output_lst = ut.unNormalizeData(full_final_output_lst,
                                                   params["y_men"],
                                                   params["y_std"])
        full_y_lst = ut.unNormalizeData(full_y_lst, params["y_men"],
                                        params["y_std"])

    if params["normalise_data"] == 4:
        full_final_output_lst = ut.unNormalizeData(full_final_output_lst,
                                                   params["x_men"],
                                                   params["x_std"])
        full_y_lst = ut.unNormalizeData(full_y_lst, params["x_men"],
                                        params["x_std"])

    full_loss, dict_err = sh.get_loss(file_lst,
                                      gt=full_y_lst,
                                      est=full_final_output_lst)
    # "David" sequences report per-sequence errors; otherwise one summary
    # line whose units depend on the data mode.
    if params["sequence"] == "David":
        for u in dict_err.keys():
            seq_err = dict_err[u]
            median_result = np.median(seq_err, axis=0)
            mean_result = np.mean(seq_err, axis=0)
            print 'Epoch:', e, ' full ', u, ' median/mean error ', median_result[
                0], '/', mean_result[0], 'm  and ', median_result[
                    1], '/', mean_result[1], 'degrees.'

    else:
        median_result = np.median(full_loss, axis=0)
        mean_result = np.mean(full_loss, axis=0)
        if params["data_mode"] == "xyx":
            print 'Epoch:', e, ' full sequence median/mean error ', median_result[
                0], '/', mean_result[0], ''
        elif params["data_mode"] == "q":
            print 'Epoch:', e, ' full sequence median/mean error ', median_result[
                0], '/', mean_result[0], 'degrees.'
        else:
            print 'Epoch:', e, ' full sequence median/mean error ', median_result[
                0], '/', mean_result[0], 'm  and ', median_result[
                    1], '/', mean_result[1], 'degrees.'

    return total_loss, median_result, mean_result, full_final_output_lst, file_lst, full_noise_lst
Esempio n. 4
0
def test_data(sess, params, X, Y, index_list, S_list, R_L_list, F_list, e,
              pre_test, n_batches):
    """Evaluate the model over ``n_batches`` minibatches, write per-frame
    estimates to disk under ``params["est_file"]``, and return the
    count-weighted mean loss.

    In ``klstm`` mode the Kalman internals (prediction, measurement, Q/R/K
    matrices) are also fetched and written alongside the estimates.

    Relies on module-level globals not visible in this chunk:
    ``batch_size``, ``mode``, ``tracker`` (the TF model) and the helper
    modules ``ut``/``th``/``np`` — verify they are defined at module scope.

    Returns:
        Total loss normalised by the total sample count.
        NOTE(review): raises ZeroDivisionError if every batch reports
        ``n_count == 0`` — confirm whether that can happen.
    """
    # Recurrent-state dict keyed by state name; carried across minibatches.
    dic_state = ut.get_state_list(params)
    # One identity covariance matrix per batch lane, fed into the graph.
    I = np.asarray([
        np.diag([1.0] * params['n_output'])
        for i in range(params["batch_size"])
    ],
                   dtype=np.float32)
    is_test = 1

    state_reset_counter_lst = [0 for i in range(batch_size)]
    total_loss = 0.0
    total_n_count = 0.0
    for minibatch_index in xrange(n_batches):
        state_reset_counter_lst = [s + 1 for s in state_reset_counter_lst]
        (dic_state,x,y,r,f,_,state_reset_counter_lst,_)= \
            th.prepare_batch(is_test,index_list, minibatch_index, batch_size,
                                       S_list, dic_state, params, Y, X, R_L_list,F_list,state_reset_counter_lst)
        feed = th.get_feed(tracker,
                           params,
                           r,
                           x,
                           y,
                           I,
                           dic_state,
                           is_training=0)

        # klstm additionally exposes prediction/measurement outputs and the
        # Kalman Q, R and gain (K) matrices.
        if mode == 'klstm':
            states,final_output,final_pred_output,final_meas_output,q_mat,r_mat,k_mat,y =\
                sess.run([tracker.states,tracker.final_output,tracker.final_pred_output,tracker.final_meas_output,
                      tracker.final_q_output,tracker.final_r_output,tracker.final_k_output,tracker.y], feed)
        else:
            states, final_output, y = \
                sess.run([tracker.states, tracker.final_output, tracker.y], feed)

        # Carry the recurrent state over to the next minibatch.
        for k in states.keys():
            dic_state[k] = states[k]

        # Undo training-time normalisation; mode 4 uses x statistics and
        # also un-normalises the klstm-only outputs.
        if params["normalise_data"] == 3 or params["normalise_data"] == 2:
            final_output = ut.unNormalizeData(final_output, params["y_men"],
                                              params["y_std"])
            y = ut.unNormalizeData(y, params["y_men"], params["y_std"])

        if params["normalise_data"] == 4:
            final_output = ut.unNormalizeData(final_output, params["x_men"],
                                              params["x_std"])
            y = ut.unNormalizeData(y, params["x_men"], params["x_std"])
            if mode == 'klstm':
                final_pred_output = ut.unNormalizeData(final_pred_output,
                                                       params["x_men"],
                                                       params["x_std"])
                final_meas_output = ut.unNormalizeData(final_meas_output,
                                                       params["x_men"],
                                                       params["x_std"])

        test_loss, n_count = ut.get_loss(params,
                                         gt=y,
                                         est=final_output,
                                         r=None)
        # Keep only file names whose repeat flag r is non-zero; the
        # estimates are written against those frames.
        f = f.reshape((-1, 2))
        y_f = y.reshape(final_output.shape)
        r = r.flatten()
        fnames = f[np.nonzero(r)]
        # e=final_output[np.nonzero(r)]
        if mode == 'klstm':
            # Dump every Kalman intermediate into its own sub-directory.
            ut.write_est(est_file=params["est_file"] + "/kal_est/",
                         est=final_output,
                         file_names=fnames)
            ut.write_est(est_file=params["est_file"] + "/kal_est_dif/",
                         est=np.abs(final_output - y_f),
                         file_names=fnames)
            ut.write_est(est_file=params["est_file"] + "/kal_pred/",
                         est=final_pred_output,
                         file_names=fnames)
            ut.write_est(est_file=params["est_file"] + "/kal_pred_dif/",
                         est=np.abs(final_pred_output - y_f),
                         file_names=fnames)
            ut.write_est(est_file=params["est_file"] + "/meas/",
                         est=final_meas_output,
                         file_names=fnames)
            ut.write_est(est_file=params["est_file"] + "/q_mat/",
                         est=q_mat,
                         file_names=fnames)
            ut.write_est(est_file=params["est_file"] + "/r_mat/",
                         est=r_mat,
                         file_names=fnames)
            ut.write_est(est_file=params["est_file"] + "/k_mat/",
                         est=k_mat,
                         file_names=fnames)
            ut.write_est(est_file=params["est_file"] + "/y_f/",
                         est=y_f,
                         file_names=fnames)
        else:
            ut.write_est(est_file=params["est_file"],
                         est=final_output,
                         file_names=fnames)
        # print test/_loss
        total_loss += test_loss * n_count

        total_n_count += n_count
        # Running mean loss, printed every minibatch.
        print total_loss / total_n_count
        # if (minibatch_index%show_every==0):
        #     print pre_test+" test batch loss: (%i / %i / %i)  %f"%(e,minibatch_index,n_train_batches,test_loss)
    total_loss = total_loss / total_n_count
    s = pre_test + ' Loss --> epoch %i | error %f' % (e, total_loss)
    ut.log_write(s, params)
    return total_loss