Example #1
import tensorflow as tf

from notmnist.dataset import loadDataset
from notmnist.settings import *
from notmnist.utils import accuracy

ds = loadDataset('notMNIST_reformatted_conv.pickle')
train_dataset = ds['train_dataset']
train_labels = ds['train_labels']
valid_dataset = ds['valid_dataset']
valid_labels = ds['valid_labels']
test_dataset = ds['test_dataset']
test_labels = ds['test_labels']
exampleWidth = image_size * image_size

num_channels = 1


def maxpoolConvNet(batch_size=16, patch_size=5, depth=16, num_hidden=64):
    graph = tf.Graph()
    with graph.as_default():
        # Input data.
        tf_train_dataset = tf.placeholder(tf.float32,
                                          shape=(batch_size, image_size,
                                                 image_size, num_channels))
        tf_train_labels = tf.placeholder(tf.float32,
                                         shape=(batch_size, num_labels))
        tf_valid_dataset = tf.constant(valid_dataset)
        tf_test_dataset = tf.constant(test_dataset)
        # Variables.
        layer1_weights = tf.Variable(
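The example is cut off at the first weight variable. Purely as an illustration (the variable names, initializers, and learning rate below are assumptions, not the author's code), a two-layer conv net with 2x2 max pooling for this input shape is usually assembled roughly as follows, reusing the batch_size, patch_size, depth and num_hidden parameters and the placeholders defined above:

# Illustrative sketch only -- assumed continuation, not the original source.
layer1_weights = tf.Variable(tf.truncated_normal(
    [patch_size, patch_size, num_channels, depth], stddev=0.1))
layer1_biases = tf.Variable(tf.zeros([depth]))
layer2_weights = tf.Variable(tf.truncated_normal(
    [patch_size, patch_size, depth, depth], stddev=0.1))
layer2_biases = tf.Variable(tf.constant(1.0, shape=[depth]))
# After two rounds of 2x2 max pooling, 28x28 inputs shrink to 7x7.
layer3_weights = tf.Variable(tf.truncated_normal(
    [(image_size // 4) * (image_size // 4) * depth, num_hidden], stddev=0.1))
layer3_biases = tf.Variable(tf.constant(1.0, shape=[num_hidden]))
layer4_weights = tf.Variable(tf.truncated_normal(
    [num_hidden, num_labels], stddev=0.1))
layer4_biases = tf.Variable(tf.constant(1.0, shape=[num_labels]))

def model(data):
    # conv -> relu -> 2x2 max pool, twice
    conv = tf.nn.conv2d(data, layer1_weights, [1, 1, 1, 1], padding='SAME')
    pool = tf.nn.max_pool(tf.nn.relu(conv + layer1_biases),
                          ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    conv = tf.nn.conv2d(pool, layer2_weights, [1, 1, 1, 1], padding='SAME')
    pool = tf.nn.max_pool(tf.nn.relu(conv + layer2_biases),
                          ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    # flatten and finish with two fully connected layers
    shape = pool.get_shape().as_list()
    flat = tf.reshape(pool, [shape[0], shape[1] * shape[2] * shape[3]])
    hidden = tf.nn.relu(tf.matmul(flat, layer3_weights) + layer3_biases)
    return tf.matmul(hidden, layer4_weights) + layer4_biases

logits = model(tf_train_dataset)
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
    labels=tf_train_labels, logits=logits))
optimizer = tf.train.GradientDescentOptimizer(0.05).minimize(loss)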
Example #2
'''
Further performance optimizations of previous networks by adding
more layers and learning rate decay.
'''

import tensorflow as tf
import numpy as np
from tensorflow.python.framework import dtypes
import attr

from notmnist.dataset import loadDataset
from notmnist.settings import *

ds = loadDataset('notMNIST_reformatted_1d_images.pickle')
train_dataset = ds['train_dataset']
train_labels = ds['train_labels']
valid_dataset = ds['valid_dataset']
valid_labels = ds['valid_labels']
test_dataset = ds['test_dataset']
test_labels = ds['test_labels']
exampleWidth = image_size * image_size


@attr.s
class MultilayerNN():
    ''' Fully connected multilayer NN. '''
    input = attr.ib()
    inputSize = attr.ib()
    outputSize = attr.ib()
    hiddenLayers = attr.ib(default=attr.Factory(lambda: [10]))  # sizes of hidden layers; Factory avoids sharing one mutable list between instances
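The docstring promises learning rate decay, but the visible part of the snippet never reaches it. A minimal sketch of exponential decay in TF 1.x, assuming a loss tensor named loss already exists (all names here are placeholders, not taken from the original file):

# Learning-rate-decay sketch (assumed names, not from the original file).
global_step = tf.Variable(0, trainable=False)  # incremented once per training step
learning_rate = tf.train.exponential_decay(
    learning_rate=0.5,       # starting rate
    global_step=global_step,
    decay_steps=1000,        # decay every 1000 steps
    decay_rate=0.65,
    staircase=True)
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(
    loss, global_step=global_step)  # passing global_step advances the decay schedule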
Example #3
  Turn labels from integers to 1-hot encoding.
  :return:
  '''
  dataset = dataset.reshape((-1, image_size * image_size)).astype(np.float32)
  # Map 0 to [1.0, 0.0, 0.0 ...], 1 to [0.0, 1.0, 0.0 ...]
  labels = (np.arange(num_labels) == labels[:,None]).astype(np.float32)
  return dataset, labels
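The one-hot encoding line relies on NumPy broadcasting: labels[:, None] has shape (n, 1), np.arange(num_labels) has shape (num_labels,), and the comparison yields an (n, num_labels) boolean matrix. A tiny worked example (illustrative only, not part of the original script):

# One-hot broadcasting trick, spelled out on three labels:
example_labels = np.array([0, 2, 1])
one_hot = (np.arange(3) == example_labels[:, None]).astype(np.float32)
# one_hot == [[1., 0., 0.],
#             [0., 0., 1.],
#             [0., 1., 0.]]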

def reformatDataset(ds, reformatter):
    new = {}
    new['train_dataset'], new['train_labels'] = reformatter(ds['train_dataset'], ds['train_labels'])
    new['valid_dataset'], new['valid_labels'] = reformatter(ds['valid_dataset'], ds['valid_labels'])
    new['test_dataset'], new['test_labels'] = reformatter(ds['test_dataset'], ds['test_labels'])
    return new

def reformatConv(dataset, labels, image_size=28, num_labels=10, num_channels=1):
    '''
    Reformat MNIST dataset for convolutional networks, turning images to 3d
        arrays by adding additional 'channel' dimension.
    '''
    dataset = dataset.reshape((-1, image_size, image_size, num_channels)).astype(np.float32)
    labels = (np.arange(num_labels) == labels[:,None]).astype(np.float32)
    return dataset, labels
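A quick shape check of what reformatConv returns, using random data (hypothetical usage, not part of the original script):

# Hypothetical usage: 100 random 28x28 images with labels 0..9.
images = np.random.rand(100, 28, 28).astype(np.float32)
digits = np.random.randint(0, 10, size=100)
data4d, onehot = reformatConv(images, digits)
# data4d.shape == (100, 28, 28, 1) -- trailing 'channel' axis added for conv layers
# onehot.shape == (100, 10)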

if __name__ == '__main__':
    ds = loadDataset()
    #ds = loadDataset('notMNIST_reformatted_1d_images.pickle')
    #printDsetShapes(ds)
    # nds = reformatDataset(ds, reformatConv)
    # printDsetShapes(nds)
    # saveDataset(nds, 'notMNIST_reformatted_conv.pickle')