# Example 1
# Logit layer: project the first hidden layer onto the 10 output classes.
# NOTE(review): logits are deliberately left unscaled — the loss below
# applies softmax internally.
logits = tf.layers.dense(hidden_layer_1, 10)

# Integer class labels, expanded to one-hot vectors to match the 10-way logits.
label_placeholder = tf.placeholder(tf.uint8, shape=[None])
label_one_hot = tf.one_hot(label_placeholder, 10)

# Per-example softmax cross-entropy, averaged over the batch.
cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(
    labels=label_one_hot, logits=logits)
loss = tf.reduce_mean(cross_entropy)

# Backpropagation: Adam with default hyperparameters.
train = tf.train.AdamOptimizer().minimize(loss)

# Accuracy metric provided by the project helper module.
accuracy = dataUtils.accuracy(logits, label_one_hot)

# TensorBoard summaries for training curves and raw tensors.
tf.summary.scalar('accuracy', accuracy)
tf.summary.scalar('loss', loss)
tf.summary.tensor_summary("logits", logits)
tf.summary.tensor_summary("labels", label_one_hot)
summary_tensor = tf.summary.merge_all()

# Checkpointing of all graph variables.
saver = tf.train.Saver()

# Launch the graph in a session.
with tf.Session() as sess:
    training_summary_writer = tf.summary.FileWriter(
        TENSORBOARD_LOGDIR + "/training", sess.graph)
# Example 2
# Logit layer: project the second hidden layer onto the 2 output classes.
# BUG FIX: the original wrapped this dense layer in tf.nn.softmax before
# passing it to softmax_cross_entropy_with_logits_v2. That op expects
# *unscaled* logits and applies softmax internally, so the original computed
# a softmax of a softmax — a wrong, gradient-flattening loss. The accuracy
# helper is unaffected because softmax is monotonic (argmax is unchanged).
logits = tf.layers.dense(hidden_layer_2, 2, activation=None)

# Placeholder for one-hot labels over the 2 classes.
label_placeholder = tf.placeholder(tf.float32, shape=[None, 2])

# Softmax cross-entropy loss, averaged over the batch.
loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(labels=label_placeholder,
                                               logits=logits))

# Backpropagation: Adam with default hyperparameters.
train = tf.train.AdamOptimizer().minimize(loss)

# Accuracy metric provided by the project helper module.
accuracy = dataUtils.accuracy(logits, label_placeholder)

# TensorBoard summaries for training curves.
tf.summary.scalar('accuracy', accuracy)
tf.summary.scalar('loss', loss)
merged = tf.summary.merge_all()

# Add ops to save and restore all the variables.
saver = tf.train.Saver()

# Launch the graph in a session.
with tf.Session() as sess:
    train_writer = tf.summary.FileWriter("summary/project1", sess.graph)
    # Initialize variables
    sess.run(tf.global_variables_initializer())