Example #1
import os  # required by the helper below


def makedir(path):  # NOTE: the original def line is cut off in this example; the name is assumed
    """
    Create the directory at `path` if it does not already exist.
    :param path: directory path to create
    :return: None
    """
    if not os.path.exists(path):
        try:
            os.makedirs(path)
            print('{0} has been created'.format(path))
        except OSError as e:
            print(e)
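A quick usage sketch for the helper above (the `makedir` name matches the reconstruction of the truncated signature; the path is illustrative, not from the example):

makedir('./logs/run_01')  # creates the directory and prints './logs/run_01 has been created'
makedir('./logs/run_01')  # second call is a no-op: the path already exists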


if __name__ == "__main__":

    num_train_samples = get_num_samples(record_dir=FLAGS.train_data_dir)
    num_val_samples = get_num_samples(record_dir=FLAGS.test_data_dir)

    # compute how many steps make up one epoch and the total number of steps
    step_per_epoch = num_train_samples // FLAGS.batch_size  # steps per epoch (remainder batch dropped)
    max_step = FLAGS.epoch * step_per_epoch  # total steps over all epochs

    vgg = VGG16(input_shape=[FLAGS.height, FLAGS.width, FLAGS.depth],
                num_classes=FLAGS.num_classes,
                batch_size=FLAGS.batch_size,
                learning_rate=FLAGS.learning_rate,
                decay_rate=FLAGS.decay_rate,
                num_samples_per_epoch=num_train_samples,
                num_epoch_per_decay=FLAGS.num_epoch_per_decay,
                keep_prob=FLAGS.keep_prop,
                weight_decay=FLAGS.weight_decay)
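The step arithmetic in Example #1 is plain integer division; a tiny worked sketch with made-up counts (50000 samples, batch size 32, and 30 epochs are illustrative values, not from the example):

num_train_samples = 50000
batch_size = 32
epochs = 30

step_per_epoch = num_train_samples // batch_size  # 1562 steps; the 16 leftover samples are dropped
max_step = epochs * step_per_epoch                # 46860 total training steps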
Example #2
        # group the global and local variable initializers into a single op
        # (the assignment line is cut off in this example; tf.group is the usual idiom)
        init_op = tf.group(
            tf.global_variables_initializer(),
            tf.local_variables_initializer()
        )
        sess.run(init_op)

        # evaluate the input pipeline once to get a concrete batch of arrays
        inputs, labels = sess.run([inputs, labels])
        feed_dict = inception_v3.fill_feed_dict(image_feed=inputs, label_feed=labels, is_training=True)

        # forward pass; the logits must come out as [batch_size, num_classes]
        logits = sess.run(fetches=[inception_v3.logits], feed_dict=feed_dict)
        assert list(logits[0].shape) == [batch_size, FLAGS.num_classes]

if __name__ == "__main__":

    num_samples = get_num_samples(record_file=FLAGS.train_dir)
    # derive the batch size from a fixed number of steps per epoch
    batch_size = num_samples // FLAGS.step_per_epoch

    inception_v3 = InceptionV3(input_shape=[FLAGS.height, FLAGS.width, FLAGS.depth],
                               num_classes=FLAGS.num_classes,
                               batch_size=batch_size, 
                               decay_rate=FLAGS.decay_rate,
                               learning_rate=FLAGS.learning_rate,
                               num_samples_per_epoch=num_samples,
                               num_epoch_per_decay=FLAGS.num_epoch_per_decay,
                               keep_prob=FLAGS.keep_prob,
                               regular_weight_decay=FLAGS.regular_weight_decay,
                               batch_norm_decay=FLAGS.batch_norm_decay,
                               batch_norm_epsilon=FLAGS.batch_norm_epsilon,
                               batch_norm_fused=FLAGS.batch_norm_fused,
                               is_pretrain=FLAGS.is_pretrain)
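Example #2 is essentially a shape smoke test: push one batch through the graph and assert the logits come out as [batch_size, num_classes]. A minimal self-contained sketch of the same check, using a stand-in NumPy "network" instead of the real InceptionV3 (all names and sizes here are illustrative):

import numpy as np

def check_logits_shape(batch_size=8, height=32, width=32, depth=3, num_classes=5):
    rng = np.random.default_rng(0)
    inputs = rng.random((batch_size, height, width, depth), dtype=np.float32)
    # stand-in for the network: flatten the batch and apply one random linear layer
    weights = rng.random((height * width * depth, num_classes), dtype=np.float32)
    logits = inputs.reshape(batch_size, -1) @ weights
    # the same assertion Example #2 makes against the real graph
    assert list(logits.shape) == [batch_size, num_classes]
    return logits

check_logits_shape()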
Example #3
                   'Probability that each element is kept.')
flags.DEFINE_float('weight_decay', 0.0005, 'L2 regularization scale.')
flags.DEFINE_bool('is_pretrain', True, 'If True, restore weights from a pretrained model.')
flags.DEFINE_string('pretrain_model_dir', pretrain_model_dir,
                    'Directory from which to restore the pretrained model.')
flags.DEFINE_string('train_data', train_data,
                    'Directory to put the training data.')
flags.DEFINE_string('val_data', val_data,
                    'Directory to put the validation data.')
flags.DEFINE_string('model_dir', model_dir, 'Directory to save model.')
flags.DEFINE_string('logs_dir', logs_dir, 'Directory for summary logs.')
flags.DEFINE_integer('save_step_period', 2000, 'Step interval between model checkpoints.')
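The block above follows the TF1 `tf.app.flags` pattern: each `DEFINE_*` call registers a command-line option with a default and a help string, and the value is read back through `FLAGS.<name>`. A minimal self-contained sketch, assuming TensorFlow 1.x (flag names here are illustrative, not from the example):

import tensorflow as tf

flags = tf.app.flags
flags.DEFINE_integer('batch_size', 32, 'Number of samples per training batch.')
flags.DEFINE_float('learning_rate', 0.01, 'Initial learning rate.')
FLAGS = flags.FLAGS

def main(_):
    # values reflect any command-line overrides, e.g.
    #   python demo_flags.py --batch_size=64 --learning_rate=0.001
    print(FLAGS.batch_size, FLAGS.learning_rate)

tf.app.run(main)  # parses sys.argv, then calls main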

if __name__ == "__main__":

    num_train_samples = get_num_samples(record_dir=FLAGS.train_data)
    num_val_samples = get_num_samples(record_dir=FLAGS.val_data)
    # the sample counts above are approximate samples per epoch

    # number of training steps per epoch
    train_step_per_epoch = num_train_samples // FLAGS.batch_size
    # max_step = FLAGS.epoch * train_step_per_epoch  # total steps over all epochs

    # number of steps in one validation epoch
    val_step_per_epoch = num_val_samples // FLAGS.batch_size

    alex_net = AlexNet(input_shape=[FLAGS.height, FLAGS.width, FLAGS.depth],
                       num_classes=FLAGS.num_classes,
                       batch_size=FLAGS.batch_size,
                       learning_rate=FLAGS.learning_rate,
                       keep_prob=FLAGS.keep_prop,