Example #1
prediction = tf.nn.softmax(tf.matmul(h_fc1_drop, W_fc2) + b_fc2)

# cross-entropy loss between the prediction and the ground-truth labels
cross_entropy = tf.reduce_mean(
    -tf.reduce_sum(ys * tf.log(prediction), reduction_indices=[1]))  # loss
train_step = tf.train.AdamOptimizer(1e-4).minimize(cross_entropy)

sess = tf.Session()
sess.run(tf.global_variables_initializer())

#GiveAnswer(mnist.test.images[3])
#print("ans:",list(mnist.test.labels[3]).index(1))

from MKPicSet import PicSet as PS

MP = PS()
MP.AddDir('./TraingData/')

a = 0
i = 0
while a < cf.SuccessRate and i < cf.MAX_Training:
    batch_xs, batch_ys = MP.batch(cf.batch)
    sess.run(train_step,
             feed_dict={
                 xs: batch_xs,
                 ys: batch_ys,
                 keep_prob: 0.5
             })
    if i % 10 == 0:
        MP.reset()
        test_xs, test_ys = MP.batch(cf.batch)
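# A note on the loss above: computing -sum(ys * log(softmax(logits))) by hand
# can produce NaN once the softmax output underflows to zero. A numerically
# more stable alternative (an editor's sketch, assuming TF 1.5+ and the same
# xs/ys placeholders and layer variables defined earlier in this script):
logits = tf.matmul(h_fc1_drop, W_fc2) + b_fc2
stable_cross_entropy = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(labels=ys, logits=logits))
stable_train_step = tf.train.AdamOptimizer(1e-4).minimize(stable_cross_entropy)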
Example #2
                                                   predictions,
                                                   name="my_metric")
running_vars = tf.get_collection(tf.GraphKeys.LOCAL_VARIABLES,
                                 scope="my_metric")
running_vars_initializer = tf.variables_initializer(var_list=running_vars)

# Initialize the global variables (tf.initialize_all_variables is deprecated in TF 1.x)
init = tf.global_variables_initializer()
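# The two lines above follow the standard tf.metrics running-metric pattern:
# the metric op (cut off at the top of this snippet, typically something like
# tf.metrics.accuracy) returns a value tensor and an update op, and keeps its
# counters as LOCAL_VARIABLES under the given scope. A minimal usage sketch,
# assuming `metric_value` and `metric_update` are the two tensors returned by
# that call and `some_feed` is a feed_dict supplying labels and predictions:
#
#     sess.run(running_vars_initializer)                 # reset the counters
#     for _ in range(num_eval_batches):
#         sess.run(metric_update, feed_dict=some_feed)   # accumulate
#     print(sess.run(metric_value))                      # read the running metric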

#############################################################
#sess = tf.Session()
#sess.run(tf.global_variables_initializer())
from MKPicSet import PicSet as PS

MP = PS()
MP.Addtest('./TraingData10/')
#MP.show()
NumberOfOneTraing = 1253  # number of images used in each single training pass

from test import PicSet as TPS

TMP = TPS()
#print(1)
TMP.Addtest('./TraingData10/')
#print(2)
#MP.show()
TNumberOfOneTraing = 1303
###########################################################
#print(3)
with tf.Session() as sess:
Example #3
prediction = tf.nn.softmax(tf.matmul(h_fc1_drop, W_fc2) + b_fc2)

# cross-entropy loss between the prediction and the ground-truth labels
cross_entropy = tf.reduce_mean(
    -tf.reduce_sum(ys * tf.log(prediction), reduction_indices=[1]))  # loss
train_step = tf.train.AdamOptimizer(1e-4).minimize(cross_entropy)

sess = tf.Session()
sess.run(tf.global_variables_initializer())

#GiveAnswer(mnist.test.images[3])
#print("ans:",list(mnist.test.labels[3]).index(1))

from MKPicSet import PicSet as PS

MP = PS()
MP.AddDir('./TraingData/')
NumberOfOneTraing = 100  # number of images used in each single training pass

for i in range(101):  # number of training iterations
    batch_xs, batch_ys = MP.batch(NumberOfOneTraing)
    sess.run(train_step,
             feed_dict={
                 xs: batch_xs,
                 ys: batch_ys,
                 keep_prob: 0.5
             })
    if i % 10 == 0:
        test_xs, test_ys = MP.batch(100)
        #print(test_xs[0],test_ys[0])
        print(str(i * NumberOfOneTraing), ",",
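# The print above is truncated by the listing. For reference, a common way to
# report test accuracy in a TF1 script like this one (an editor's sketch, not
# the original code; in practice these graph ops would be built once, before
# the training loop, rather than inside it):
correct_prediction = tf.equal(tf.argmax(prediction, 1), tf.argmax(ys, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(sess.run(accuracy, feed_dict={xs: test_xs, ys: test_ys, keep_prob: 1.0}))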