Example no. 1
def Linear(x):
    # Fully-connected output layer: maps flattened features to one logit per class.
    return tf.layers.dense(inputs=x, units=dt.getsinifcount(), name='linear')
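A minimal usage sketch of the layer above, assuming TensorFlow 1.x; `num_classes` is a stand-in for the value returned by `dt.getsinifcount()`:

import tensorflow as tf

num_classes = 10  # stand-in for dt.getsinifcount()
features = tf.placeholder(tf.float32, shape=[None, 1024])  # already-flattened features
logits = tf.layers.dense(inputs=features, units=num_classes, name='linear')
# logits has shape [None, num_classes], ready for a softmax cross-entropy loss.
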
Example no. 2
        # Flatten the final feature maps and project them to class logits.
        x = flatten(x)
        x = Linear(x)

        # x = tf.reshape(x, [-1, 10])
        return x


if __name__ == '__main__':
    dt = dt.Data()
    # Load training and test images at the 120x120 resolution used by the placeholders below.
    train_x, train_y = dt.read_train_images(120, 120)
    test_x, test_y = dt.read_test_images(120, 120)
    #train_x, test_x = color_preprocessing(train_x, test_x)

    # image_size = 32, img_channels = 3, class_num = 10 in cifar10
    # Inputs: batches of 120x120 RGB images; labels are one-hot over the class count.
    x = tf.placeholder(tf.float32, shape=[None, 120, 120, 3])
    label = tf.placeholder(tf.float32, shape=[None, dt.getsinifcount()])

    # Fed at run time: training_flag switches the network between training and
    # inference behaviour (batch norm / dropout); learning_rate lets the schedule change per step.
    training_flag = tf.placeholder(tf.bool)

    learning_rate = tf.placeholder(tf.float32, name='learning_rate')

    # nb_block (number of dense blocks) and growth_k (growth rate) are
    # hyperparameters assumed to be defined earlier in the script.
    logits = DenseNet(x=x,
                      nb_blocks=nb_block,
                      filters=growth_k,
                      training=training_flag).model
    cost = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits_v2(labels=label,
                                                   logits=logits))
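    # Sketch of one training step (an illustration assuming the commented-out
    # Nesterov-momentum optimizer below; not part of the original listing):
    # the placeholders above are fed via feed_dict, with training_flag=True so
    # DenseNet runs in training mode.
    #
    # train_op = tf.train.MomentumOptimizer(learning_rate, momentum=0.9,
    #                                       use_nesterov=True).minimize(cost)
    # with tf.Session() as sess:
    #     sess.run(tf.global_variables_initializer())
    #     _, batch_cost = sess.run([train_op, cost],
    #                              feed_dict={x: train_x[:64], label: train_y[:64],
    #                                         training_flag: True, learning_rate: 0.1})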
    """
    l2_loss = tf.add_n([tf.nn.l2_loss(var) for var in tf.trainable_variables()])
    optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate, momentum=nesterov_momentum, use_nesterov=True)