info, we should use 1.0, since a keep probability of 1.0 means nothing is dropped.
    10. Read-out layer: 10 output neurons fully connected to the dropout layer, so the shape of
        the parameters in this layer is 1024x10.
'''


# Check that the necessary environment variables are defined before doing any
# work. Note: plain `assert` statements are stripped when Python runs with -O,
# so explicit raises are used to guarantee the checks always execute.
if 'MLP_DATA_DIR' not in os.environ:
    raise RuntimeError(
        'An environment variable MLP_DATA_DIR must be set to the path containing'
        ' MLP data before running script.')
if 'OUTPUT_DIR' not in os.environ:
    raise RuntimeError(
        'An environment variable OUTPUT_DIR must be set to the path to write'
        ' output to before running script.')

'''In this section, I will load the data and reshape them for RGB channels'''
train_data_10 = CIFAR10DataProvider('train', batch_size=128)
valid_data_10 = CIFAR10DataProvider('valid', batch_size=128)
train_data_100 = CIFAR100DataProvider('train', batch_size=128)
valid_data_100 = CIFAR100DataProvider('valid', batch_size=128)

'''Reshape the train and valid data to -1X32X32X3'''
# Un-interleave each flat CIFAR-100 vector into an image: the Fortran-order
# reshape groups the three colour planes per pixel, and the following C-order
# reshape lays the result out as 32x32 RGB. Same transform for both splits.
for _provider, _n_examples in ((train_data_100, 40000), (valid_data_100, 10000)):
    _provider.inputs = _provider.inputs.reshape((_n_examples, -1, 3), order='F')
    _provider.inputs = _provider.inputs.reshape((_n_examples, 32, 32, 3))

#change the valid targets to one hot coding
valid_targets = valid_data_100.to_one_of_k(valid_data_100.targets)

# Prepare some function for the net
def conv2d_stride1(x, W):
# --- Exemplo n.º 2 (scraped example separator; commented out so the file parses) ---
import os
import tensorflow as tf
import numpy as np
import time
from datetime import timedelta
import math
from sklearn.metrics import confusion_matrix
from mlp.data_providers import CIFAR10DataProvider, CIFAR100DataProvider
import matplotlib.pyplot as plt
import pickle
# Enable inline matplotlib rendering; this script was exported from a
# Jupyter notebook (see the "# In[n]:" cell markers below).
get_ipython().magic(u'matplotlib inline')


# In[2]:

# CIFAR-10 providers yielding mini-batches of 50 flattened images
# (presumably 3072 = 32*32*3 values each — confirm against the provider).
# NOTE(review): batch size 50 is assumed later by the conv1 reshape; keep in sync.
train_data = CIFAR10DataProvider('train', batch_size=50)
valid_data = CIFAR10DataProvider('valid', batch_size=50)


# In[3]:

def plot_images(images, cls_true, cls_pred=None):
    assert len(images) == len(cls_true) == 9
    images = images.reshape(9, 3, 32, 32).transpose(0,2,3,1).astype("float")
    cls_true = np.array(cls_true)
    fig, axes1 = plt.subplots(5,5,figsize=(3,3))
    for j in range(5):
        for k in range(5):
            i = np.random.choice(range(len(images)))
            axes1[j][k].set_axis_off()
            axes1[j][k].imshow(images[i:i+1][0])
# --- Exemplo n.º 3 (scraped example separator; commented out so the file parses) ---
    indices = rng.choice(orig_ims.shape[0], orig_ims.shape[0] // 4, False)
    for i, j in enumerate(indices):
        new_ims[j] = np.fliplr(orig_ims[j])
    return new_ims.reshape((-1, 3072))


# In[6]:

# Fixed seed so the random flip augmentation is reproducible across runs.
random_seed = 1627522
rng = np.random.RandomState(random_seed)

# Training provider applies the random_fliplr transformer (horizontal flips —
# see np.fliplr in its body) to each batch; validation data is left
# untransformed but shares the same RNG for reproducible shuffling.
train_data = AugmentedCIFAR10DataProvider('train',
                                          batch_size=50,
                                          transformer=random_fliplr,
                                          rng=rng)
valid_data = CIFAR10DataProvider('valid', batch_size=50, rng=rng)

# In[6]:

# Graph input placeholders. `inputs` holds flattened images of
# train_data.inputs.shape[1] values each (presumably 3072 = 32*32*3 —
# confirm against the provider); `targets` holds one-hot class labels.
inputs = tf.placeholder(tf.float32, [None, train_data.inputs.shape[1]],
                        'inputs')
targets = tf.placeholder(tf.float32, [None, train_data.num_classes], 'targets')
# Dropout keep probability, fed per run (1.0 disables dropout).
keep_prob = tf.placeholder(tf.float32)
# Training/evaluation flag. Fix: use the TensorFlow dtype tf.bool rather
# than the Python builtin `bool`.
train = tf.placeholder(tf.bool)

with tf.variable_scope('conv1') as scope:
    # Recover NCHW images from the flat vectors, then transpose to NHWC for
    # the convolution. Generalized: -1 infers the batch dimension instead of
    # hard-coding 50, so the graph also accepts a final, smaller batch.
    reshape = tf.reshape(inputs, [-1, 3, 32, 32])
    reshape = tf.transpose(reshape, [0, 2, 3, 1])
    conv1 = conv_bn_layer1(reshape, keep_prob=1.0)

# pool1