Example #1
                 y) = async_b.get()  # get the return value from your function.
                if (minibatch_index == n_train_batches - 1):
                    pred, H, C = model.predictions(x, is_train, H, C)
                    loss3d = u.get_loss(params, y, pred)
                    batch_loss3d.append(loss3d)

            batch_loss3d = np.nanmean(batch_loss3d)
            if (batch_loss3d < best_loss):
                best_loss = batch_loss3d
                ext = str(epoch_counter) + "_" + str(batch_loss3d) + "_best.p"
                u.write_params(model.params, params, ext)
            else:
                ext = str(val_counter % 2) + ".p"
                u.write_params(model.params, params, ext)

            val_counter += 1
            s = 'VAL--> epoch %i | error %f, %f' % (val_counter, batch_loss3d,
                                                    n_test_batches)
            u.log_write(s, params)


params = config.get_params()
parser = argparse.ArgumentParser(description='Training the module')
parser.add_argument('-m',
                    '--model',
                    help='Model: lstm, erd current(' + params["model"] + ')',
                    default=params["model"])
args = vars(parser.parse_args())
params["model"] = args["model"]
params = config.update_params(params)
train_rnn(params)
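The validation block above follows a common checkpointing pattern: when the mean validation loss improves, the parameters are written to a file tagged with the epoch and the loss, otherwise they alternate between two rolling files. A minimal standalone sketch of that pattern, with a hypothetical save_params helper standing in for u.write_params, might look like this:

import numpy as np

def save_params(weights, ext):
    # hypothetical stand-in for u.write_params(model.params, params, ext)
    print("would save checkpoint as", ext)

best_loss = np.inf
val_counter = 0
for epoch_counter, losses in enumerate([[0.9, 1.1], [0.6, 0.7], [0.8, 0.8]]):
    batch_loss3d = np.nanmean(losses)  # mean validation loss for this epoch
    if batch_loss3d < best_loss:
        best_loss = batch_loss3d
        # best checkpoints keep the epoch and loss in the file name
        save_params(None, str(epoch_counter) + "_" + str(batch_loss3d) + "_best.p")
    else:
        # non-best checkpoints alternate between "0.p" and "1.p"
        save_params(None, str(val_counter % 2) + ".p")
    val_counter += 1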
Example #2
                loss3d = u.get_loss(params, y, pred)
                batch_loss3d.append(loss3d)
                x = []
                y = []
                (sid, H, C, x, y) = async_b.get()  # get the return value from your function.
                if (minibatch_index == n_train_batches - 1):
                    pred, H, C = model.predictions(x, is_train, H, C)
                    loss3d = u.get_loss(params, y, pred)
                    batch_loss3d.append(loss3d)

            batch_loss3d = np.nanmean(batch_loss3d)
            if (batch_loss3d < best_loss):
                best_loss = batch_loss3d
                ext = str(epoch_counter) + "_" + str(batch_loss3d) + "_best.p"
                u.write_params(model.params, params, ext)
            else:
                ext = str(val_counter % 2) + ".p"
                u.write_params(model.params, params, ext)

            val_counter += 1  # 0.08
            s = 'VAL--> epoch %i | error %f, %f' % (val_counter, batch_loss3d,
                                                    n_test_batches)
            u.log_write(s, params)


params = config.get_params()
parser = argparse.ArgumentParser(description='Training the module')
parser.add_argument('-m',
                    '--model',
                    help='Model: lstm, lstm2, erd current(' + params["model"] + ')',
                    default=params["model"])
args = vars(parser.parse_args())
params["model"] = args["model"]
params = config.update_params(params)
train_rnn(params)
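Both examples end with the same command-line handling: params is seeded from config.get_params() and the -m/--model flag can override the model entry before training starts. A self-contained sketch of that pattern, with a plain dict standing in for config.get_params(), might look like this:

import argparse

params = {"model": "lstm", "lr": 0.001}   # stand-in for config.get_params()

parser = argparse.ArgumentParser(description='Training the module')
parser.add_argument('-m', '--model',
                    help='Model: lstm, lstm2, erd current(' + params["model"] + ')',
                    default=params["model"])
args = vars(parser.parse_args())
params["model"] = args["model"]           # command-line value (or the default) wins
print("training model:", params["model"])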
Example #3
params["mfile"]='/mnt/Data1/hc/tt/cp/lstm_nostate1/cp/' # adding more values to params#
rnn_keep_prob=0.8
input_keep_prob=1.0
params['rnn_keep_prob']=rnn_keep_prob
params['input_keep_prob']=input_keep_prob
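# (Not part of this script: keep probabilities like these are typically wired into a
#  TF 1.x RNN cell with DropoutWrapper, e.g.
#      cell = tf.nn.rnn_cell.LSTMCell(num_units)
#      cell = tf.nn.rnn_cell.DropoutWrapper(cell,
#                                           input_keep_prob=params['input_keep_prob'],
#                                           output_keep_prob=params['rnn_keep_prob'])
#  how this repository actually consumes the two values is defined in its model code.)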
seq = 50  # sequence length; stored in params['seq_length'] below
res = 5   # reset-state setting; stored in params['reset_state'] below
with tf.Graph().as_default():
    print "seq: ============== %s  ============" % seq
    print "reset_state: ============== %s  ============" % res
    print "rnn_keep_prob: ============== %s  ============" % rnn_keep_prob
    params['normalise_data'] = 4            # data-normalisation mode used by the data pipeline
    params['reset_state'] = res
    params['seq_length'] = seq
    params["reload_data"] = 0               # flag controlling whether the data set is reloaded from disk
    params = config.update_params(params)   # recompute values derived from the new entries
    #params["model"] = "kfl_QRf"
    if params["model"] == "lstm":
        Model = lstm(params=params)
    elif params["model"] == "kfl_QRf":
        Model = kfl_QRf(params=params)
    elif params["model"] == "kfl_Rf":
        Model = kfl_Rf(params=params)
    elif params["model"] == "kfl_QRFf":
        Model = kfl_QRFf(params=params)
    elif params["model"] == "kfl_K":
        Model = kfl_K(params=params)
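    # (Side note, not in the original: every branch above calls a constructor with the
    #  same signature, so an equivalent dict-based dispatch would be
    #      model_classes = {"lstm": lstm, "kfl_QRf": kfl_QRf, "kfl_Rf": kfl_Rf,
    #                       "kfl_QRFf": kfl_QRFf, "kfl_K": kfl_K}
    #      Model = model_classes[params["model"]](params=params)
    #  which also raises a clear KeyError for an unknown model name.)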
    params["rn_id"]="dobuleloss081500_nrm4_seq%i_res%i_keep%f_lr%f"%(seq,res,rnn_keep_prob,params["lr"]) # adding more values to params, what does this value signify? #
    params=config.update_params(params) # New param values are updated#
#    (params, X_train, Y_train, F_list_train, G_list_train, S_Train_list, R_L_Train_list,
#             X_test, Y_test, F_list_test, G_list_test, S_Test_list, R_L_Test_list) = \