Example #1
    # assumes TensorFlow 1.x graph mode, import numpy as np, import tensorflow as tf,
    # from sklearn.preprocessing import StandardScaler, and the module-level helpers
    # make_tensor, _make_batches and the net model object defined elsewhere in the source file
    lr = 1e-2
    decay = (1, 0.2)
    n_epoch = 5
    batch_size = 1000

    # data
    x_tr = np.load('data/x_tr.npy')
    y_tr = np.load('data/y_tr.npy')
    x_te = np.load('data/x_te.npy')
    y_te = np.load('data/y_te.npy')
    y_tr = y_tr.astype(int)
    y_te = y_te.astype(int)
    # standardize the features; the extra division by 3 shrinks their range further
    scaler_x = StandardScaler()
    x_tr = scaler_x.fit_transform(x_tr) / 3
    x_te = scaler_x.transform(x_te) / 3
    # wrap the arrays as TensorFlow tensors (make_tensor is a helper from the source module)
    x_tr = make_tensor(x_tr, 'x_tr')
    y_tr = make_tensor(y_tr, 'y_tr', dtype=tf.int64)
    x_te = make_tensor(x_te, 'x_te')
    y_te = make_tensor(y_te, 'y_te', dtype=tf.int64)

    # batches
    x_batch, y_batch = _make_batches(x_tr, y_tr, batch_size)
    x_te_batch, y_te_batch = _make_batches(x_te, y_te, batch_size, test=True)

    # loss: one-hot encode the labels and average softmax cross-entropy over the batch
    y_batch_oh = tf.one_hot(y_batch, 2)
    pred = net.predict(x_batch)
    loss = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(labels=y_batch_oh,
                                                logits=pred))
    pred_labels = tf.argmax(pred, axis=1)  # predicted class indices
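
The fragment stops once the graph is built. Below is a minimal sketch of how the remaining training pieces might be wired up in TensorFlow 1.x graph mode, assuming the decay tuple is read as (decay_steps, decay_rate) and picking AdamOptimizer purely for illustration (the original optimizer is not shown in the snippet):

    # hypothetical continuation, not part of the original example
    global_step = tf.train.get_or_create_global_step()
    lr_decayed = tf.train.exponential_decay(lr, global_step,
                                            decay_steps=decay[0],
                                            decay_rate=decay[1])
    train_op = tf.train.AdamOptimizer(lr_decayed).minimize(loss,
                                                           global_step=global_step)
    # fraction of correctly classified samples in the current batch
    accuracy = tf.reduce_mean(tf.cast(tf.equal(pred_labels, y_batch), tf.float32))

With these nodes in place, a session loop would repeatedly run train_op and log loss and accuracy.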
Example #2
        # tail of the preceding block in the source file: persist the learned weights
        np.save('W1.npy', W1)
        np.save('b1.npy', b1)
        np.save('W2.npy', W2)
        np.save('b2.npy', b2)
        np.save('W3.npy', W3)
        np.save('b3.npy', b3)
        np.save('W4.npy', W4)

with tf.Graph().as_default():
    data_dir = "data_reg/"
    # NN, prepare_data, make_tensor, r2 and mse used below are helpers
    # defined elsewhere in the source module
    net = NN(H1=20, H2=20, d=4, p=0.5)
    lr = 1e-2

    # data
    x_tr, y_tr, x_te, y_te = prepare_data(data_dir, mode='numpy')
    x_tr = make_tensor(x_tr, 'x_tr')
    y_tr = make_tensor(y_tr, 'y_tr')
    x_te = make_tensor(x_te, 'x_te')
    y_te = make_tensor(y_te, 'y_te')
    # graph nodes for test-set predictions and metrics (not evaluated yet)
    pred_te = net.predict(x_te)
    r2_te = r2(pred_te, y_te)
    mse_te = mse(pred_te, y_te)
    
    decay = (100, 0.2)
    n_epoch = 300
    batch_size = 200
    
    # queue-based input pipeline: slice_input_producer yields one (x, y) sample at a
    # time and tf.train.batch groups batch_size of them into a mini-batch
    sample = tf.train.slice_input_producer([x_tr, y_tr])
    x_batch, y_batch = tf.train.batch(sample, batch_size)
    pred = net.predict(x_batch)
    loss = mse(pred, y_batch)
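
The snippet ends after defining the training loss. Because tf.train.slice_input_producer and tf.train.batch are queue-based, nothing flows through x_batch and y_batch until queue runners are started inside a session. Below is a minimal sketch of that loop, assuming the training-set size is statically known and using AdamOptimizer purely for illustration (the decay schedule above is omitted for brevity):

    # hypothetical continuation, not part of the original example
    train_op = tf.train.AdamOptimizer(lr).minimize(loss)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)
        try:
            # assumes x_tr has a statically known first dimension
            steps = n_epoch * (x_tr.get_shape()[0].value // batch_size)
            for _ in range(steps):
                sess.run(train_op)
            print('test r2 / mse:', sess.run([r2_te, mse_te]))
        finally:
            coord.request_stop()
            coord.join(threads)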