Example #1
import os
import math
import pickle
import statistics as stat

import numpy as np
import matplotlib.pyplot as plt

from fashion_mnist.utils.mnist_reader import load_mnist


def load_images(path="/fashion_mnist/data/fashion", kind='train'):
    mnist_path = os.getcwd() + path
    labels = ['t_shirt_top', 'trouser', 'pullover', 'dress', 'coat', 'sandal',
              'shirt', 'sneaker', 'bag', 'ankle_boots']
    # Read the data with the utility function load_mnist, e.g.:
    # train_images, train_labels = load_mnist(mnist_path, kind='train')
    # t10k_images, t10k_labels = load_mnist(mnist_path, kind='t10k')
    # train_labels_str = image_class_to_str(train_labels)
    # t10k_labels_str = image_class_to_str(t10k_labels)
    return load_mnist(mnist_path, kind=kind)

image, label = load_mnist('./fashion_mnist/data/mnist/')
all_img = image.copy()
all_lbl = label.copy()

t_images, t_labels = load_mnist('./fashion_mnist/data/mnist/', kind='t10k')
all_test_images = t_images.copy()
all_test_labels = t_labels.copy()

digit_type = [1, 8]  # binary task: class 1 is the positive class, class 8 the negative class

N = 2  # number of classes in the binary task
cm_2 = np.zeros((N, N))  # 2x2 confusion matrix

classes = list(range(N))
# Number of data points per class in the training and test sets
unq, cnt = np.unique(all_lbl, return_counts=True)
unq1, cnt1 = np.unique(all_test_labels, return_counts=True)
print(unq, cnt)
print(unq1, cnt1)
class_priors = cnt / len(all_lbl)  # cnt: number of training points per class
total_train_points = cnt[digit_type[0]] + cnt[digit_type[1]]
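
The example breaks off after computing the class priors. A minimal continuation, assuming the same variable names, would filter both splits down to the two classes listed in digit_type before training a binary classifier:

# Sketch of a possible continuation (not part of the original example):
# keep only the two chosen classes and relabel them as 1 (positive) / 0 (negative).
train_mask = np.isin(all_lbl, digit_type)
test_mask = np.isin(all_test_labels, digit_type)

bin_train_images = all_img[train_mask]
bin_train_labels = (all_lbl[train_mask] == digit_type[0]).astype(int)
bin_test_images = all_test_images[test_mask]
bin_test_labels = (all_test_labels[test_mask] == digit_type[0]).astype(int)

print(bin_train_images.shape, bin_test_images.shape)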
Example #3
from fashion_mnist.utils import mnist_reader


def load_mnist_dataset():
    # Absolute path to the Fashion-MNIST data directory on the author's machine.
    dir_loc = '/home/kamil/PycharmProjects/image-recognition-msid/fashion_mnist/data/fashion'
    train_set = mnist_reader.load_mnist(dir_loc, kind='train')
    test_set = mnist_reader.load_mnist(dir_loc, kind='t10k')

    return train_set, test_set
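
load_mnist returns each split as an (images, labels) tuple, so a caller unpacks two pairs. A short hypothetical usage (not part of the original example):

(X_train, y_train), (X_test, y_test) = load_mnist_dataset()
print(X_train.shape, y_train.shape)  # (60000, 784) (60000,)
print(X_test.shape, y_test.shape)    # (10000, 784) (10000,)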
Example #4
import numpy as np

from fashion_mnist.utils import mnist_reader as m_reader


def get_data():
    """
    Get data ready to learn with.

    Returns
    -------
    dict
    """

    simple = False
    if simple:
        from sklearn.datasets import load_digits
        from sklearn.utils import shuffle
        digits = load_digits()
        x = [np.array(el).flatten() for el in digits.images]
        y = digits.target

        x = np.divide(x, 16.0) * 2 - 1  # load_digits pixels range from 0 to 16

        x, y = shuffle(x, y, random_state=0)

        from sklearn.model_selection import train_test_split  # sklearn.cross_validation was removed

        x_train, x_test, y_train, y_test = train_test_split(x,
                                                            y,
                                                            test_size=0.33,
                                                            random_state=42)

        data = {
            'train': {
                'X': x_train,
                'y': y_train
            },
            'test': {
                'X': x_test,
                'y': y_test
            }
        }
    else:
        # from sklearn.datasets import fetch_mldata
        from sklearn.utils import shuffle
        # mnist = fetch_mldata("MNIST original")
        # x = mnist.data
        # y = mnist.target

        # x = x / 255.0 * 2 - 1
        #
        # x, y = shuffle(x, y, random_state=0)
        #
        # from sklearn.cross_validation import train_test_split
        #
        # x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.33, random_state=42)

        X_train, y_train = m_reader.load_mnist('fashion_mnist/data/fashion',
                                               kind='train')
        X_test, y_test = m_reader.load_mnist('fashion_mnist/data/fashion',
                                             kind='t10k')

        X_test = X_test / 255.0 * 2 - 1
        X_train = X_train / 255.0 * 2 - 1

        data = {
            'train': {
                'X': X_train,
                'y': y_train
            },
            'test': {
                'X': X_test,
                'y': y_test
            }
        }
    return data
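
get_data() returns a dict with the training and test splits already scaled to [-1, 1], so any scikit-learn classifier can consume it directly. A short usage sketch (the LogisticRegression choice is illustrative, not part of the original example):

from sklearn.linear_model import LogisticRegression

data = get_data()
clf = LogisticRegression(max_iter=200)  # illustrative model choice
clf.fit(data['train']['X'], data['train']['y'])
print("test accuracy:", clf.score(data['test']['X'], data['test']['y']))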
Example #5
import tensorflow as tf
from keras import backend as K
from keras.backend.tensorflow_backend import set_session
from vis.visualization import visualize_activation
from vis.utils import utils
from fashion_mnist.utils import mnist_reader

# TensorFlow 1.x session setup: allocate GPU memory on demand.
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
set_session(tf.Session(config=config))

batch_size = 400
num_classes = 10
epochs = 20

# input image dimensions
img_rows, img_cols = 28, 28

# the data, split between train and test sets
x_train, y_train = mnist_reader.load_mnist('fashion_mnist/data/fashion', kind='train')
x_test, y_test = mnist_reader.load_mnist('fashion_mnist/data/fashion', kind='t10k')

if K.image_data_format() == 'channels_first':
    x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
    x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
    input_shape = (1, img_rows, img_cols)
else:
    x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
    x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
    input_shape = (img_rows, img_cols, 1)

x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
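
The example stops before building the model that visualize_activation would later inspect. A minimal Keras CNN sketch consistent with the variables above (the architecture and layer names are assumptions, not taken from the original example):

from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense
from keras.utils import to_categorical

# One-hot encode the integer labels for categorical cross-entropy.
y_train_cat = to_categorical(y_train, num_classes)
y_test_cat = to_categorical(y_test, num_classes)

# Assumed architecture: a small CNN ending in a named softmax layer, the kind
# of layer keras-vis activation visualization is usually pointed at.
model = Sequential([
    Conv2D(32, (3, 3), activation='relu', input_shape=input_shape),
    MaxPooling2D((2, 2)),
    Flatten(),
    Dense(128, activation='relu'),
    Dense(num_classes, activation='softmax', name='preds'),
])
model.compile(loss='categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])
model.fit(x_train, y_train_cat, batch_size=batch_size, epochs=epochs,
          validation_data=(x_test, y_test_cat))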
Example #6
from io import BytesIO
from timeit import default_timer as timer

import numpy as np
import tmap as tm
from faerun import Faerun
from PIL import Image

from fashion_mnist.utils import mnist_reader

# Configuration for the tmap layout
CFG = tm.LayoutConfiguration()
CFG.node_size = 1 / 55

# Load fashion mnist data
IMAGES_TRAIN, LABELS_TRAIN = mnist_reader.load_mnist(
    "fashion_mnist/data/fashion", kind="train")
IMAGES_TEST, LABELS_TEST = mnist_reader.load_mnist(
    "fashion_mnist/data/fashion", kind="t10k")

IMAGES = np.concatenate((IMAGES_TRAIN, IMAGES_TEST))
LABELS = np.concatenate((LABELS_TRAIN, LABELS_TEST))
IMAGE_LABELS = []


def main():
    """ Main function """

    # Initialize and configure tmap
    dims = 1024
    enc = tm.Minhash(28 * 28, 42, dims)
    lf = tm.LSHForest(dims * 2, 128)
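
    # Assumed continuation (not in the original snippet): binarize each image
    # against its mean intensity, minhash the binary vectors, and lay out the
    # LSH forest with tmap.
    start = timer()
    binary = []
    for image in IMAGES:
        avg = np.average(image)
        binary.append(tm.VectorUchar([1 if px >= avg else 0 for px in image]))

    lf.batch_add(enc.batch_from_binary_array(binary))
    lf.index()
    x, y, s, t, _ = tm.layout_from_lsh_forest(lf, CFG)
    print("Layout computed in", timer() - start, "seconds")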
Example #7
import os

from sklearn.model_selection import train_test_split
from fashion_mnist.utils.mnist_reader import load_mnist

learning_rate = 7e-3
batch_size = 256
n_epochs = 100
n_train = None
n_test = None
lamda = 0  # L2 regularization strength
random_seed = 42  # assumed value; the original snippet uses this name without defining it

# Step 1: Read in data
fmnist_folder = './fashion_mnist'
# Create a dataset load function (see the Fashion-MNIST GitHub page for the utility)
# Create train/validation/test split
# train, val, test = utils.read_fmnist(fmnist_folder, flatten=True)

# Step 2: Create datasets and iterator
# create training Dataset and batch it
x_train, y_train = load_mnist(os.path.join("fashion_mnist", "data", "fashion"), "train")
x_train = x_train / 255.0

x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, test_size=0.2,
                                                  random_state=random_seed)

print(x_train.shape, y_train.shape)
n_train = x_train.shape[0]
print(x_val.shape, y_val.shape)


# create testing Dataset and batch it
x_test, y_test = load_mnist(os.path.join("fashion_mnist", "data", "fashion"), "t10k")
x_test = x_test / 255.0
print(x_test.shape, y_test.shape)
n_test = x_test.shape[0]
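
Step 2 in the comments never actually builds the batched datasets; a minimal sketch with tf.data is shown below. It assumes TensorFlow is available, which the original snippet implies but does not import:

import tensorflow as tf

# Build batched tf.data pipelines from the arrays prepared above.
train_ds = (tf.data.Dataset.from_tensor_slices((x_train, y_train))
            .shuffle(n_train)
            .batch(batch_size))
val_ds = tf.data.Dataset.from_tensor_slices((x_val, y_val)).batch(batch_size)
test_ds = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(batch_size)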