Example #1
import os
import sys
import itertools
import numpy as np
import tensorflow as tf
from mlp.data_providers import CIFAR10DataProvider, CIFAR100DataProvider

# check necessary environment variables are defined
assert 'MLP_DATA_DIR' in os.environ, (
    'An environment variable MLP_DATA_DIR must be set to the path containing'
    ' MLP data before running script.')
assert 'OUTPUT_DIR' in os.environ, (
    'An environment variable OUTPUT_DIR must be set to the path to write'
    ' output to before running script.')

# load data
train_data = CIFAR100DataProvider('train', batch_size=50, shuffle_order=False)
valid_data = CIFAR100DataProvider('valid', batch_size=50, shuffle_order=False)
train_data_coarse = CIFAR100DataProvider('train', batch_size=50, use_coarse_targets=True, shuffle_order=False)
train_data.inputs = train_data.inputs.reshape(train_data.inputs.shape[0], 32, 32, 3)
valid_data.inputs = valid_data.inputs.reshape(valid_data.inputs.shape[0], 32, 32, 3)

valid_inputs = valid_data.inputs
valid_targets = valid_data.to_one_of_k(valid_data.targets)

# ---------------- define helper functions -------------------------------------------------------------------
def fully_connected_layer(l_inputs, input_dim, output_dim, nonlinearity=tf.nn.relu):
    # affine transform with scaled random initialisation, followed by a nonlinearity
    weights = tf.Variable(
        tf.truncated_normal(
            [input_dim, output_dim], stddev=2. / (input_dim + output_dim) ** 0.5),
        name='weights')
    biases = tf.Variable(tf.zeros([output_dim]), name='biases')
    outputs = nonlinearity(tf.matmul(l_inputs, weights) + biases)
    return outputs
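
# Illustrative usage sketch (the placeholder name and layer sizes are assumed, not
# taken from the script above): a hidden layer and a 100-way output layer on
# flattened 32x32x3 inputs.
example_inputs = tf.placeholder(tf.float32, [None, 32 * 32 * 3], name='example_inputs')
example_hidden = fully_connected_layer(example_inputs, 32 * 32 * 3, 200)
example_logits = fully_connected_layer(example_hidden, 200, 100, nonlinearity=tf.identity)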
Example #2
import os
import datetime
import numpy as np
import tensorflow as tf
from mlp.data_providers import CIFAR10DataProvider, CIFAR100DataProvider
'''VGG_10'''

# check necessary environment variables are defined
assert 'MLP_DATA_DIR' in os.environ, (
    'An environment variable MLP_DATA_DIR must be set to the path containing'
    ' MLP data before running script.')
assert 'OUTPUT_DIR' in os.environ, (
    'An environment variable OUTPUT_DIR must be set to the path to write'
    ' output to before running script.')
'''In this section, I load the data and reshape it into 32x32 RGB images'''

train_data_100 = CIFAR100DataProvider('train', batch_size=128)
valid_data_100 = CIFAR100DataProvider('valid', batch_size=128)
'''Reshape the train and valid inputs to (N, 32, 32, 3); the Fortran-order reshape moves the channel axis last'''
train_data_100.inputs = train_data_100.inputs.reshape((40000, -1, 3),
                                                      order='F')
train_data_100.inputs = train_data_100.inputs.reshape((40000, 32, 32, 3))
valid_data_100.inputs = valid_data_100.inputs.reshape((10000, -1, 3),
                                                      order='F')
valid_data_100.inputs = valid_data_100.inputs.reshape((10000, 32, 32, 3))
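
# Illustration of the Fortran-order reshape above, assuming each flattened image is
# channel-major (1024 red, then 1024 green, then 1024 blue values): the first reshape
# moves the channel index onto the last axis, the second restores the 32x32 grid.
_flat_image = np.arange(3 * 32 * 32)                              # stand-in for one flattened image
_hwc_image = _flat_image.reshape((-1, 3), order='F').reshape((32, 32, 3))
assert _hwc_image[0, 0, 1] == 1024                                # pixel (0, 0), green channel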

# convert the validation targets to one-hot encoding
valid_targets = valid_data_100.to_one_of_k(valid_data_100.targets)
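
# For reference, a one-of-k (one-hot) conversion can be sketched with an
# identity-matrix lookup; `num_classes` is an assumed parameter and this is only an
# illustration, not necessarily how the provider implements to_one_of_k.
def one_of_k_sketch(int_targets, num_classes=100):
    # row i of the identity matrix is the one-hot code for class i
    return np.eye(num_classes)[int_targets]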


# Helper functions for building the net
def conv2d_stride1(x, W):
    # stride-1 2D convolution; 'SAME' (size-preserving) padding is assumed here
    return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')
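
# Illustrative usage sketch (tensor names and the 3x3x3x64 filter shape are assumed):
example_images = tf.placeholder(tf.float32, [None, 32, 32, 3], name='example_images')
example_kernel = tf.Variable(tf.truncated_normal([3, 3, 3, 64], stddev=0.1), name='example_kernel')
example_feature_map = tf.nn.relu(conv2d_stride1(example_images, example_kernel))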
Example #3
# # object recognition with CIFAR-100

# In[1]:

import os
import tensorflow as tf
import numpy as np
from mlp.data_providers import CIFAR10DataProvider, CIFAR100DataProvider
import matplotlib.pyplot as plt
import time

# In[2]:

seed = 10102016
rng = np.random.RandomState(seed)
train_data = CIFAR100DataProvider('train', batch_size=50, rng=rng)
valid_data = CIFAR100DataProvider('valid', batch_size=50, rng=rng)
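
# A minimal sketch of one training epoch, assuming each provider yields
# (inputs, targets) batches when iterated over; `run_train_step` is a hypothetical
# callback, not something defined in this script.
def run_one_epoch(provider, run_train_step):
    for input_batch, target_batch in provider:
        run_train_step(input_batch, target_batch)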

# # Regularisation

# ## Experiment Baseline
# ### CIFAR-10

# In[3]:


def fully_connected_layer(inputs,
                          input_dim,
                          output_dim,
                          nonlinearity=tf.nn.relu,
                          dropout=False,