import numpy as np
import tensorflow as tf


def dndt_predict(train_X, test_X, train_Y, num_class, num_cut, num_leaf,
                 n_bag):
    # Train an ensemble of n_bag deep neural decision trees (DNDT) on
    # bootstrap samples of train_X and return majority-vote predictions
    # for test_X.
    d = train_X.shape[1]
    sess = tf.InteractiveSession()
    x_ph = tf.placeholder(tf.float32, [None, d])
    y_ph = tf.placeholder(tf.float32, [None, num_class])
    # One learnable cut-point vector per feature, plus a learnable score
    # matrix with one row per leaf.
    cut_points_list = [tf.Variable(tf.random_uniform([i])) for i in num_cut]
    leaf_score = tf.Variable(tf.random_uniform([num_leaf, num_class]))
    y_pred = nn_decision_tree(x_ph,
                              cut_points_list,
                              leaf_score,
                              temperature=0.1)
    loss = tf.reduce_mean(
        tf.losses.softmax_cross_entropy(logits=y_pred, onehot_labels=y_ph))
    opt = tf.train.AdamOptimizer(0.1)
    train_step = opt.minimize(loss)
    sess.run(tf.global_variables_initializer())
    x_example = []
    y_label = []
    y_ens = []
    label_list = []
    indx = np.arange(len(train_X))

    for k in range(n_bag):
        # Re-initialise the tree so each bag is trained from scratch.
        sess.run(tf.global_variables_initializer())

        # Draw a bootstrap sample of the training set.
        examples = random1(indx)
        for i in examples:
            x_example.append(train_X[i])
            y_label.append(train_Y[i])
        x_data = np.array(x_example)
        y_data = np.array(y_label)

        # Fit this bag's tree on its bootstrap sample (the original fed the
        # full training set here, leaving the bootstrap arrays unused).
        for i in range(1000):
            _, loss_e = sess.run([train_step, loss],
                                 feed_dict={
                                     x_ph: x_data,
                                     y_ph: y_data
                                 })
        # Predict hard labels for the test set with this bag's tree.
        sample_label = np.argmax(y_pred.eval(feed_dict={x_ph: test_X}), axis=1)
        label_list.append(sample_label)
        x_example.clear()
        y_label.clear()

    # Collect the per-bag predictions into a (len(test_X), n_bag) matrix
    # and reduce each row to a single label via majority vote (ensemble).
    avg = np.transpose(np.array(label_list).reshape(n_bag, len(test_X)))

    for i in range(len(test_X)):
        y_ens.append(ensemble(avg[i]))
    return y_ens
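
The helpers random1 and ensemble are not defined in this example. Below is a
minimal sketch consistent with how they are used here (bootstrap resampling of
an index array, then a majority vote over one test point's per-bag labels);
both implementations are assumptions, not the original code.

def random1(indx):
    # Assumed behaviour: a standard bootstrap sample, i.e. len(indx)
    # indices drawn with replacement.
    return np.random.choice(indx, size=len(indx), replace=True)


def ensemble(labels):
    # Assumed behaviour: majority vote over the per-bag labels of one
    # test point.
    values, counts = np.unique(labels, return_counts=True)
    return values[np.argmax(counts)]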
Example #2
def test_nn_decision_tree(self):
    sess = tf.InteractiveSession()
    x = np.array([[1, 2], [2, 3], [1, 3], [2, 2]]).astype(np.float32)
    cut_points_list = [
        np.array([1.5]).astype(np.float32),
        np.array([2.5]).astype(np.float32)
    ]
    leaf_score = np.array([[4, 1], [3, 2], [2, 3],
                           [1, 4]]).astype(np.float32)
    res = nn_decision_tree(tf.constant(x),
                           [tf.constant(i) for i in cut_points_list],
                           tf.constant(leaf_score)).eval()
    exp = np.array([[4, 1], [1, 4], [3, 2], [2, 3]])
    np.testing.assert_almost_equal(res, exp, decimal=1)
    sess.close()
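
Every example on this page calls nn_decision_tree without showing it. The
sketch below is one DNDT-style implementation consistent with the call
signature used above (soft-bin each feature against its cut points, combine
the binnings with a row-wise Kronecker product into a soft leaf assignment,
then score the leaves); treat it as an assumption about the implementation,
not the original code.

from functools import reduce

def tf_kron_prod(a, b):
    # Row-wise outer product: (N, p) and (N, q) -> (N, p * q).
    res = tf.einsum('ij,ik->ijk', a, b)
    return tf.reshape(res, [tf.shape(a)[0], -1])


def tf_bin(x, cut_points, temperature=0.1):
    # Softly assign one feature column x of shape (N, 1) to
    # len(cut_points) + 1 bins; a low temperature gives near-hard bins.
    # (The reference DNDT code also sorts cut_points here so they stay
    # monotonic during training.)
    D = cut_points.get_shape().as_list()[0]
    W = tf.reshape(tf.linspace(1.0, D + 1.0, D + 1), [1, -1])
    b = tf.cumsum(tf.concat([tf.zeros([1]), -cut_points], 0))
    return tf.nn.softmax((tf.matmul(x, W) + b) / temperature)


def nn_decision_tree(x, cut_points_list, leaf_score, temperature=0.1):
    # The Kronecker product of the per-feature binnings yields one soft
    # leaf-membership vector per sample, which then indexes leaf_score.
    leaf = reduce(tf_kron_prod,
                  [tf_bin(x[:, i:i + 1], c, temperature)
                   for i, c in enumerate(cut_points_list)])
    return tf.matmul(leaf, leaf_score)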
Example #3
import random


def random_forest(train_data, test_data, max_features, batch_size, epochs,
                  *args, **kwargs):

    # The number of trees is the total number of features divided by the
    # maximum number of features per tree.
    num_trees = int(train_data[0].shape[1] / max_features)

    predictions = []
    originals = []

    for tree in range(num_trees):

        # Sample max_features column indices (with replacement) for this tree.
        features = []

        for _ in range(max_features):
            features.append(random.randrange(0, train_data[0].shape[1]))

        col_idx = np.array(features)

        X_train = train_data[0][:, col_idx]
        y_train = train_data[1]

        X_test = test_data[0][:, col_idx]
        y_test = test_data[1]

        # One cut point per feature, i.e. a binary split on every feature.
        num_cut = [1] * max_features

        num_leaf = np.prod(np.array(num_cut) + 1)
        num_class = y_train.shape[1]

        seed = 1990
        tf.set_random_seed(seed)  # must precede variable creation to take effect

        x_ph = tf.placeholder(tf.float32, [None, max_features])
        y_ph = tf.placeholder(tf.float32, [None, num_class])

        cut_points_list = [
            tf.Variable(tf.random_uniform([i])) for i in num_cut
        ]
        leaf_score = tf.Variable(tf.random_uniform([num_leaf, num_class]))

        y_pred = nn_decision_tree(x_ph,
                                  cut_points_list,
                                  leaf_score,
                                  temperature=10)

        loss = tf.reduce_mean(
            tf.nn.softmax_cross_entropy_with_logits(logits=y_pred,
                                                    labels=y_ph))

        opt = tf.train.AdamOptimizer(0.1)
        train_step = opt.minimize(loss)

        sess = tf.InteractiveSession()
        sess.run(tf.global_variables_initializer())

        for epoch in range(epochs):

            total_batch = int(X_train.shape[0] / batch_size)

            for i in range(total_batch):

                batch_mask = np.random.choice(X_train.shape[0], batch_size)

                batch_x = X_train[batch_mask].reshape(-1, X_train.shape[1])
                batch_y = y_train[batch_mask].reshape(-1, y_train.shape[1])

                _, loss_e = sess.run([train_step, loss],
                                     feed_dict={
                                         x_ph: batch_x,
                                         y_ph: batch_y
                                     })
        """For each tree, the predicted values and the original y_values are stacked vertically 
       in two different numpy arrays after training each tree for 100 epochs"""

        pred = np.vstack(
            np.array(y_pred.eval(feed_dict={x_ph: X_test}), dtype=np.float32))
        orig = np.vstack(np.array(y_test, dtype=np.float32))

        sess.close()

    return (pred, orig)
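
A hypothetical smoke test for random_forest with random one-hot data; the
shapes and variable names below are illustrative assumptions, not from the
original source.

rng = np.random.RandomState(0)
X_tr = rng.rand(200, 8).astype(np.float32)
Y_tr = np.eye(2, dtype=np.float32)[rng.randint(0, 2, 200)]
X_te = rng.rand(50, 8).astype(np.float32)
Y_te = np.eye(2, dtype=np.float32)[rng.randint(0, 2, 50)]

pred, orig = random_forest((X_tr, Y_tr), (X_te, Y_te),
                           max_features=4, batch_size=32, epochs=10)
acc = np.mean(np.argmax(pred, axis=1) == np.argmax(orig, axis=1))
print('ensemble test accuracy: %.3f' % acc)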
Example #4
# One cut point per feature of the d-dimensional input (d, y, and the
# imports are assumed to be defined earlier in the source script).
num_cut = [1] * d

num_leaf = np.prod(np.array(num_cut) + 1)
num_class = y.shape[1]

x_ph = tf.placeholder(tf.float32, [None, d])
y_ph = tf.placeholder(tf.float32, [None, num_class])

cut_points_list = [tf.Variable(tf.random_uniform([i])) for i in num_cut]
leaf_score = tf.Variable(tf.random_uniform([num_leaf, num_class]))

y_pred = nn_decision_tree(x_ph, cut_points_list, leaf_score, temperature=10)

loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(logits=y_pred, labels=y_ph))

opt = tf.train.AdamOptimizer(0.1)
train_step = opt.minimize(loss)

sess = tf.InteractiveSession()
tf.set_random_seed(1990)  # note: only affects ops created after this call

start_time = time.time()

sess.run(tf.global_variables_initializer())

for epoch in range(epochs):