dropout1 = tf.nn.dropout(dense1, rember)
# tf.nn.dropout() is TensorFlow's regularization op for preventing/reducing
# overfitting; it is typically applied after a fully-connected layer.
# During each training step it randomly deactivates a subset of neurons:
# each activation is kept with probability p, and a dropped neuron neither
# contributes to the forward pass nor receives a weight update on that step.
# Its weights are retained (just temporarily frozen), because the neuron may
# be active again when the next batch of samples arrives.
# NOTE(review): the second argument "rember" is presumably the keep-probability
# placeholder (likely a misspelling of "remember"/keep_prob) — confirm upstream.

# In[8]:

# Fully-connected layer 2: maps the 512-dim feature vector to 10 class logits.
fc2 = tf.Variable(tf.truncated_normal(shape=[512, 10], stddev=0.01))
out = tf.matmul(dropout1, fc2)
print(out)
# Mean softmax cross-entropy loss between the logits and the one-hot labels.
# NOTE(review): tf.nn.softmax_cross_entropy_with_logits is deprecated in
# favor of softmax_cross_entropy_with_logits_v2 (TF >= 1.5) — confirm version.
cost = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(logits=out, labels=y_lab))
optimizer = tf.train.AdamOptimizer(0.01).minimize(cost)
# Reader for the CIFAR-10 python batches (path is machine-specific).
dr = cifar_reader.Cifar10DataReader(
    cifar_folder="C:/Users/Administrator/cifar-10-batches-py/")

# In[9]:

# Evaluate the network: accuracy is the fraction of samples whose argmax
# logit matches the argmax of the one-hot label.
correct_pred = tf.equal(tf.argmax(out, 1), tf.argmax(y_lab, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))

# In[10]:

# Initialize all shared variables.
# Fix: tf.initialize_all_variables() was deprecated in TF 0.12 and removed in
# later releases; tf.global_variables_initializer() is the drop-in
# replacement (this file already uses it in the Session block below).
init = tf.global_variables_initializer()
saver = tf.train.Saver()

# In[11]:
# Batch-normalize the pre-activation of fully-connected layer 1.
# NOTE(review): "is_traing" looks like a misspelling of "is_training"; it is
# presumably a boolean placeholder toggling BN train/inference behavior —
# confirm where it is defined.
bn_fc1 = tf.layers.batch_normalization(tf.matmul(dense_tmp, fc1),
                                       training=is_traing)

dense1 = tf.nn.relu(bn_fc1)
# Dropout on the ReLU activations; keep_prob is the keep probability.
dropout1 = tf.nn.dropout(dense1, keep_prob)

# Fully-connected layer 2: 1024-dim features -> 10 class logits.
fc2 = tf.Variable(tf.truncated_normal(shape=[1024, 10], stddev=0.04))
out = tf.matmul(dropout1, fc2)
print(out)

# Mean softmax cross-entropy between the logits and the one-hot labels y.
# NOTE(review): softmax_cross_entropy_with_logits is deprecated in favor of
# the _v2 variant (TF >= 1.5) — confirm the TF version in use.
cost = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(logits=out, labels=y))
optimizer = tf.train.AdamOptimizer(0.01).minimize(cost)

# CIFAR-10 reader rooted at a relative path.
dr = cifar_reader.Cifar10DataReader(cifar_folder="./cifar-10-batches-py/")

# Evaluate the network: mean match rate between predicted and true classes.
correct_pred = tf.equal(tf.argmax(out, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))

# Initialize all shared variables.
# Fix: tf.initialize_all_variables() was deprecated in TF 0.12 and removed in
# later releases; tf.global_variables_initializer() is the drop-in
# replacement (already used elsewhere in this file).
init = tf.global_variables_initializer()

saver = tf.train.Saver()
c = []  # presumably collects per-step loss values; filled later — confirm
# Launch a training run; record the wall-clock start time.
start_time = time.time()
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # saver.restore(sess, "model_tmp/cifar10_demo.ckpt")  # resume from checkpoint
# Example #3  (marker scraped from a code-examples site, followed by a stray
# "0" vote-count line; commented out so the file remains valid Python)
# Batch-normalize the pre-activation of fully-connected layer 1.
# NOTE(review): "is_traing" is presumably a misspelled "is_training" boolean
# placeholder controlling BN train/inference mode — confirm its definition.
bn_fc1 = tf.layers.batch_normalization(tf.matmul(dense_tmp, fc1),
                                       training=is_traing)

dense1 = tf.nn.relu(bn_fc1)
# Dropout on the ReLU output; keep_prob is the keep-probability placeholder.
dropout1 = tf.nn.dropout(dense1, keep_prob)

# Fully-connected layer 2: 1024-dim features -> 10 class logits.
fc2 = tf.Variable(tf.truncated_normal(shape=[1024, 10], stddev=0.04))
out = tf.matmul(dropout1, fc2)
print(out)

# Mean softmax cross-entropy between the logits and the one-hot labels y.
cost = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(logits=out, labels=y))
optimizer = tf.train.AdamOptimizer(0.01).minimize(cost)

# NOTE(review): a CIFAR-10 reader is pointed at "./MIT-Cars-dataset" — verify
# the reader actually supports that dataset's layout, or fix the path.
dr = cifar_reader.Cifar10DataReader(cifar_folder="./MIT-Cars-dataset")

# Evaluate the network: mean match rate between predicted and true classes.
correct_pred = tf.equal(tf.argmax(out, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))

# Initialize all shared variables.
# Fix: tf.initialize_all_variables() was deprecated in TF 0.12 and removed in
# later releases; tf.global_variables_initializer() is the drop-in
# replacement (already used elsewhere in this file).
init = tf.global_variables_initializer()

saver = tf.train.Saver()

# Launch a training run; record the wall-clock start time.
starttime = datetime.datetime.now()
with tf.Session() as sess:
    sess.run(init)
    # saver.restore(sess, "model_tmp/cifar10_demo.ckpt")  # resume from checkpoint