Code example #1
0
  print('{} = {}'.format(attr.upper(), value))
print()

# Number of input features per image: CIFAR-10 images are 32*32*3 = 3072
# values when flattened.
IMAGE_PIXELS = 3072
# Number of output classes in CIFAR-10.
CLASSES = 10

# Record the start time so elapsed run time can be reported later.
beginTime = time.time()

# Load the CIFAR-10 dataset via the project's data_helpers module.
data_sets = data_helpers.load_data()

# Placeholder for a batch of flattened images: [batch_size, IMAGE_PIXELS].
images_placeholder = tf.placeholder(tf.float32, shape=[None, IMAGE_PIXELS],
  name='images')

# Placeholder for the batch's integer class labels: [batch_size].
labels_placeholder = tf.placeholder(tf.int64, shape=[None], name='image-labels')

# Build the two-layer fully-connected network; `logits` holds the
# unnormalized class scores produced by the model.
logits = two_layer_fc.inference(images_placeholder, IMAGE_PIXELS,
  FLAGS.hidden1, CLASSES, reg_constant=FLAGS.reg_constant)

# Non-trainable counter tracking how many training steps have completed;
# restored from the checkpoint below.
global_step = tf.Variable(0, name="global_step", trainable=False)

# Op computing classification accuracy of `logits` against the labels.
accuracy = two_layer_fc.evaluation(logits, labels_placeholder)

# Saver used to restore model variables from checkpoint files.
saver = tf.train.Saver()

with tf.Session() as sess:
  # Look for an existing checkpoint in the training directory.
  ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
  if ckpt and ckpt.model_checkpoint_path:
    print('Restoring variables from checkpoint')
    saver.restore(sess, ckpt.model_checkpoint_path)
    # Read back the restored step counter (presumably to report or resume
    # training progress — the continuation is outside this view).
    current_step = tf.train.global_step(sess, global_step)
    print('Current step: {}'.format(current_step))
Code example #2
0
# Record the start time so elapsed run time can be reported later.
beginTime = time.time()

# Load the CIFAR-10 dataset via the project's data_helpers module.
data_sets = data_helpers.load_data()

# Placeholder for a batch of flattened images: [batch_size, IMAGE_PIXELS].
images_placeholder = tf.placeholder(tf.float32,
                                    shape=[None, IMAGE_PIXELS],
                                    name='images')

# Placeholder for the batch's integer class labels: [batch_size].
labels_placeholder = tf.placeholder(tf.int64,
                                    shape=[None],
                                    name='image-labels')

# Build the two-layer fully-connected network; `logits` holds the
# unnormalized class scores produced by the model.
logits = two_layer_fc.inference(images_placeholder,
                                IMAGE_PIXELS,
                                FLAGS.hidden1,
                                CLASSES,
                                reg_constant=FLAGS.reg_constant)

# Non-trainable counter tracking how many training steps have completed;
# restored from the checkpoint below.
global_step = tf.Variable(0, name="global_step", trainable=False)

# Op computing classification accuracy of `logits` against the labels.
accuracy = two_layer_fc.evaluation(logits, labels_placeholder)

# Saver used to restore model variables from checkpoint files.
saver = tf.train.Saver()

with tf.Session() as sess:
    # Look for an existing checkpoint in the training directory.
    ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
    if ckpt and ckpt.model_checkpoint_path:
        print('Restoring variables from checkpoint')
        saver.restore(sess, ckpt.model_checkpoint_path)
        # Read back the restored step counter (presumably to report or
        # resume training progress — the continuation is outside this view).
        current_step = tf.train.global_step(sess, global_step)
Code example #3
0
# Per-run log directory, timestamped so successive runs don't overwrite each
# other's TensorBoard summaries.
# NOTE(review): this example uses `Flags` (capital F) consistently while the
# sibling examples use `FLAGS` — kept as-is on the assumption `Flags` is
# defined above this excerpt; confirm against the full script.
log_dir = Flags.train_dir + '/' + datetime.now().strftime('%Y%m%d-%H%M%S') + '/'

# Load CIFAR-10 data.
data_sets = data_helpers.load_data()

# Prepare the TensorFlow graph: input placeholders for a batch of flattened
# images ([batch_size, IMAGE_PIXELS]) and their integer labels ([batch_size]).
images_placeholder = tf.placeholder(tf.float32, shape=[None, IMAGE_PIXELS],
                                    name='images')
labels_placeholder = tf.placeholder(tf.int64, shape=[None],
                                    name='image-labels')

# Operation for the classifier's unnormalized class scores.
# Fixed typos: `image_placeholder` -> `images_placeholder` and
# `IMAGE_PIXEL` -> `IMAGE_PIXELS` (the names actually defined above).
logits = two_layer_fc.inference(images_placeholder, IMAGE_PIXELS,
                                Flags.hidden1, CLASSES,
                                reg_constant=Flags.reg_constant)
# Operation for calculating the (regularized) loss.
loss = two_layer_fc.loss(logits, labels_placeholder)
# Operation performing a single training step.
train_step = two_layer_fc.training(loss, Flags.learning_rate)
# Operation for calculating accuracy of our predictions.
accuracy = two_layer_fc.evaluation(logits, labels_placeholder)
# Merge all registered summaries into a single op for TensorBoard.
summary = tf.summary.merge_all()
saver = tf.train.Saver()

# Run the graph in a session.
with tf.Session() as sess:
    # Fixed: the correct TF1 API is tf.global_variables_initializer();
    # the original called a nonexistent tf.global_variable_initializer().
    sess.run(tf.global_variables_initializer())
    # Fixed: write summaries to `log_dir` as defined above; the original
    # referenced an undefined name `logdir`.
    summary_writer = tf.summary.FileWriter(log_dir, sess.graph)