# Evaluation script for SqueezeNet on CIFAR-10 (tf.contrib.slim).
import math

import tensorflow as tf

from models.slim.datasets import dataset_factory
from models.slim.preprocessing import preprocessing_factory

import squeezenet
import arg_parsing

slim = tf.contrib.slim

args = arg_parsing.parse_args(training=False)

tf.logging.set_verbosity(tf.logging.INFO)

with tf.Graph().as_default() as g:
    with g.device(args.eval_device):
        # Read the CIFAR-10 test split via the slim dataset factory.
        dataset = dataset_factory.get_dataset('cifar10', 'test', args.data_dir)
        tf_global_step = slim.get_or_create_global_step()

        # No shuffling, so every evaluation pass sees the same test set.
        provider = slim.dataset_data_provider.DatasetDataProvider(
            dataset,
            shuffle=False,
            common_queue_capacity=2 * args.batch_size,
            common_queue_min=args.batch_size)
        [image, label] = provider.get(['image', 'label'])

        # CIFAR-10 images are 32x32; apply the cifarnet eval preprocessing.
        image_preprocessing_fn = preprocessing_factory.get_preprocessing(
            'cifarnet', is_training=False)
        image = image_preprocessing_fn(image, 32, 32)
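        # --- Sketch of how this eval script likely continues (not in the
        # --- original excerpt). It follows the standard slim evaluation
        # --- pattern; the exact arg names (checkpoint_dir, eval_dir) and
        # --- the squeezenet.inference signature are assumptions.
        images, labels = tf.train.batch(
            [image, label],
            batch_size=args.batch_size,
            num_threads=4,
            capacity=5 * args.batch_size)

        logits = squeezenet.inference(images)  # assumed signature
        predictions = tf.argmax(logits, 1)

        names_to_values, names_to_updates = slim.metrics.aggregate_metric_map({
            'Accuracy': slim.metrics.streaming_accuracy(predictions, labels),
        })

        num_batches = int(math.ceil(dataset.num_samples / float(args.batch_size)))

        slim.evaluation.evaluation_loop(
            master='',
            checkpoint_dir=args.checkpoint_dir,  # assumed flag name
            logdir=args.eval_dir,                # assumed flag name
            num_evals=num_batches,
            eval_op=list(names_to_updates.values()),
            variables_to_restore=slim.get_variables_to_restore())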
# Evaluation script for the VGG14 CIFAR-10 model (tf.contrib.slim).
import tensorflow as tf

from models.slim.datasets import dataset_factory
from models.slim.preprocessing import preprocessing_factory

import vgg_cifar10

slim = tf.contrib.slim

EXPERIMENT_NAME = 'VGG14_long_train'
BATCH_SIZE = 512
CHECKPOINT_DIR = '/mnt/data1/vgg_results/' + EXPERIMENT_NAME + '/train'
EVAL_DIR = CHECKPOINT_DIR[:-5] + 'test'  # swap the trailing 'train' for 'test'
DATA_DIR = '/mnt/data1/cifar'
EVAL_DEVICE = '/gpu:2'

tf.logging.set_verbosity(tf.logging.INFO)

with tf.Graph().as_default() as g:
    with g.device(EVAL_DEVICE):
        # Read the CIFAR-10 test split via the slim dataset factory.
        dataset = dataset_factory.get_dataset('cifar10', 'test', DATA_DIR)
        tf_global_step = slim.get_or_create_global_step()

        # No shuffling, so every evaluation pass sees the same test set.
        provider = slim.dataset_data_provider.DatasetDataProvider(
            dataset,
            shuffle=False,
            common_queue_capacity=2 * BATCH_SIZE,
            common_queue_min=BATCH_SIZE)
        [image, label] = provider.get(['image', 'label'])

        # CIFAR-10 images are 32x32; apply the cifarnet eval preprocessing.
        image_preprocessing_fn = preprocessing_factory.get_preprocessing(
            'cifarnet', is_training=False)
        image = image_preprocessing_fn(image, 32, 32)
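        # --- Sketch of how this eval script likely continues (not in the
        # --- original excerpt). It follows the standard slim evaluation
        # --- pattern; the vgg_cifar10.inference name/signature is an
        # --- assumption.
        images, labels = tf.train.batch(
            [image, label],
            batch_size=BATCH_SIZE,
            num_threads=4,
            capacity=5 * BATCH_SIZE)

        logits = vgg_cifar10.inference(images)  # assumed function name
        predictions = tf.argmax(logits, 1)

        names_to_values, names_to_updates = slim.metrics.aggregate_metric_map({
            'Accuracy': slim.metrics.streaming_accuracy(predictions, labels),
        })

        # Ceiling division so the final partial batch is still counted.
        num_batches = (dataset.num_samples + BATCH_SIZE - 1) // BATCH_SIZE

        slim.evaluation.evaluation_loop(
            master='',
            checkpoint_dir=CHECKPOINT_DIR,
            logdir=EVAL_DIR,
            num_evals=num_batches,
            eval_op=list(names_to_updates.values()),
            variables_to_restore=slim.get_variables_to_restore())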
# Multi-GPU training script for SqueezeNet on CIFAR-10 (tf.contrib.slim).
import tensorflow as tf

from models.slim.datasets import dataset_factory
from models.slim.deployment import model_deploy  # assumed path, mirroring the imports above
from models.slim.preprocessing import preprocessing_factory

import squeezenet

slim = tf.contrib.slim

DATA_DIR = '/mnt/data1/cifar'
TRAIN_DIR = '/mnt/data1/squeezenet_results/LR_01_95_DR_BN/train'
BATCH_SIZE = 256
INIT_LEARNING_RATE = 0.01
LR_DECAY = 0.95
NUM_EPOCHS_PER_DECAY = 2
MAX_STEPS = 8000
NUM_CLONES = 3

tf.logging.set_verbosity(tf.logging.INFO)

# Replicate the model graph across NUM_CLONES GPUs.
deploy_config = model_deploy.DeploymentConfig(num_clones=NUM_CLONES)

with tf.device(deploy_config.variables_device()):
    global_step = slim.create_global_step()

dataset = dataset_factory.get_dataset('cifar10', 'train', DATA_DIR)
network_fn = squeezenet.inference
image_preprocessing_fn = preprocessing_factory.get_preprocessing(
    'cifarnet', is_training=True)

with tf.device(deploy_config.inputs_device()):
    with tf.name_scope('inputs'):
        # Shuffled training pipeline with several parallel readers.
        provider = slim.dataset_data_provider.DatasetDataProvider(
            dataset,
            num_readers=7,
            common_queue_capacity=20 * BATCH_SIZE,
            common_queue_min=10 * BATCH_SIZE)
        [image, label] = provider.get(['image', 'label'])
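        # --- Sketch of how this training script likely continues (not in
        # --- the original excerpt). It follows the standard slim
        # --- model_deploy training pattern; the squeezenet.inference
        # --- signature, the SGD optimizer choice, and the summary/save
        # --- intervals are assumptions.
        image = image_preprocessing_fn(image, 32, 32)
        images, labels = tf.train.batch(
            [image, label],
            batch_size=BATCH_SIZE,
            num_threads=4,
            capacity=5 * BATCH_SIZE)
        labels = slim.one_hot_encoding(labels, dataset.num_classes)
        batch_queue = slim.prefetch_queue.prefetch_queue(
            [images, labels], capacity=2 * NUM_CLONES)

def clone_fn(batch_queue):
    """Builds one clone of the network and attaches its loss."""
    images, labels = batch_queue.dequeue()
    logits = network_fn(images)  # assumed signature
    slim.losses.softmax_cross_entropy(logits, labels)
    return logits

clones = model_deploy.create_clones(deploy_config, clone_fn, [batch_queue])

with tf.device(deploy_config.optimizer_device()):
    # Decay the learning rate by LR_DECAY every NUM_EPOCHS_PER_DECAY epochs.
    decay_steps = int(dataset.num_samples / BATCH_SIZE * NUM_EPOCHS_PER_DECAY)
    learning_rate = tf.train.exponential_decay(
        INIT_LEARNING_RATE, global_step, decay_steps, LR_DECAY, staircase=True)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate)

total_loss, clones_gradients = model_deploy.optimize_clones(clones, optimizer)
grad_updates = optimizer.apply_gradients(
    clones_gradients, global_step=global_step)

# The experiment name mentions BN, so also run batch-norm update ops.
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS,
                               deploy_config.clone_scope(0))
update_ops.append(grad_updates)
with tf.control_dependencies([tf.group(*update_ops)]):
    train_tensor = tf.identity(total_loss, name='train_op')

slim.learning.train(
    train_tensor,
    logdir=TRAIN_DIR,
    number_of_steps=MAX_STEPS,
    save_summaries_secs=120,
    save_interval_secs=600)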