Example #1
def evaluate_image(load_checkpoint):
    """
    Creates a convolutional network, optionally loads its weights from file, and
    runs it on a random test image from CIFAR-10. Visualizes top predictions.

    Args:
        load_checkpoint: Boolean flag indicating if weights should be loaded from file.
    """
    # Load data from file into memory.
    class_names, _, _, images_test, labels_test = load_cifar10()

    # Create network.
    my_model = model.Model()

    # Create runner, optionally load weights from file.
    my_runner = runner.Runner(model=my_model)
    if load_checkpoint:
        my_runner.load(os.path.join(".", "checkpoints", "my_model"))

    # Evaluate network on a random test image.
    image_index = np.random.randint(0, images_test.shape[0])
    image = images_test[image_index]
    label = labels_test[image_index]
    guess_class, guess_prob = my_runner.run(image)

    # Visualize the result.
    visualize_classification(image, label, guess_class, guess_prob, class_names)
Example #2
import numpy as np
from data_utils import load_cifar10


def get_cifar_data(num_training=49000, num_validation=1000, num_test=1000):
    x_train, y_train, x_test, y_test = load_cifar10(
        '..\斯坦福CS231N\cifar-10-batches-py')
    # Validation set
    mask = range(num_training, num_training + num_validation)
    x_val = x_train[mask]
    y_val = y_train[mask]
    # Training set
    mask = range(num_training)
    x_train = x_train[mask]
    y_train = y_train[mask]
    # Test set
    mask = range(num_test)
    x_test = x_test[mask]
    y_test = y_test[mask]
    # Compute the mean image and normalize
    mean_image = np.mean(x_train, axis=0)
    x_train -= mean_image
    x_val -= mean_image
    x_test -= mean_image
    # Flatten the images into row vectors
    x_train = x_train.reshape(num_training, -1)
    x_val = x_val.reshape(num_validation, -1)
    x_test = x_test.reshape(num_test, -1)

    return x_train, y_train, x_val, y_val, x_test, y_test
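
# A minimal usage sketch of get_cifar_data above, continuing in the same file:
x_train, y_train, x_val, y_val, x_test, y_test = get_cifar_data()
print('train:', x_train.shape)  # (49000, 3072) with the default split sizes
print('val:', x_val.shape)      # (1000, 3072)
print('test:', x_test.shape)    # (1000, 3072)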
Example #3
def prepare_dataset():
    """
    Prepares CIFAR-10 dataset.
    - 50,000 development images to be used for building the model.
    - 10,000 test images to be used for evaluating the final model.
    - Images are 32 x 32 pixels in size, with 3 channels (R, G, and B, in that order).
    - Each image is labeled with one of 10 classes.
    """
    # Download data to local storage.
    download_cifar10()

    # Load data from file into memory.
    class_names, images_dev, labels_dev, images_test, labels_test = load_cifar10()

    # Visualize some random examples from each class.
    visualize_dataset_sample(images_dev, labels_dev, sample_size=10)
Example #4
def evaluate_dataset(load_checkpoint):
    """
    Creates a convolutional network, optionally loads its weights from file, and
    runs it on the whole CIFAR-10 test set. Calculates loss and accuracy on the
    test set.

    Args:
        load_checkpoint: Boolean flag indicating if weights should be loaded from file.
    """
    # Load data from file into memory.
    class_names, _, _, images_test, labels_test = load_cifar10()

    # Create network.
    my_model = model.Model()

    # Create trainer, optionally load weights from file.
    my_trainer = trainer.Trainer(model=my_model, learning_rate=5e-4)
    if load_checkpoint:
        my_trainer.load(os.path.join(".", "checkpoints", "my_model"))

    # Evaluate network on test set.
    my_trainer.evaluate(images_test, labels_test, batch_size=64, print_every=100)
Example #5
def train():
    """
    Trains a convolutional network on CIFAR-10 for five epochs, evaluates it on the
    test set, and saves the resulting model.
    """
    # Load data from file into memory.
    _, images_dev, labels_dev, images_test, labels_test = load_cifar10()

    # Take a random 10% of development data for validation. Use the rest for training.
    # Randomization is used to achieve similar distributions for training and validation data.
    images_train, labels_train, images_val, labels_val = train_val_split(images_dev, labels_dev, val_fraction=0.1)

    # Create network.
    my_model = model.Model()

    # Train network for 5 epochs.
    my_trainer = trainer.Trainer(model=my_model, learning_rate=5e-4)
    my_trainer.train(images_train, labels_train, images_val, labels_val, batch_size=64, epochs=5, print_every=100)

    # Evaluate network on test set.
    my_trainer.evaluate(images_test, labels_test, batch_size=64, print_every=100)

    # Save the resulting model.
    my_trainer.save(os.path.join(".", "checkpoints", "my_model"))
Example #6
import numpy as np
from data_utils import load_cifar10
import matplotlib.pyplot as plt
import time
from knn import KNearestNeighbor

# 3.1) Load the data
x_train, y_train, x_test, y_test = load_cifar10(
    'cifar-10-batches-py')  # uses the data-loading helper in data_utils.py

print('training data shape:', x_train.shape)
print('training labels shape:', y_train.shape)
print('test data shape:', x_test.shape)
print('test labels shape:', y_test.shape)

# Randomly pick samples_per_class images from each class of the 50,000 training images
classes = [
    'plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship',
    'truck'
]
num_classes = len(classes)
samples_per_class = 10
for y, cls in enumerate(classes):
    """
    flatnonzero(): 该函数输入一个矩阵,返回扁平化后矩阵中非零元素的位置(index)
    >>> x = np.arange(-2, 3)  得到 array([-2, -1, 0, 1, 2])
    >>> np.flatnonzero(x)    得到array([0, 1, 3, 4]) 这几个序列位置为非零
    """
    print("----------------我是分割线初始----------------")
    print("y的值: ", y)
    print("y_train的值: ", y_train)
Example #7
import numpy as np
from data_utils import load_cifar10
import matplotlib.pyplot as plt
from knn import KNearestNeighbor

x_train,y_train,x_test,y_test = load_cifar10('cifar-10-batches-py')

classes=['plane','car','bird','cat','deer','dog','frog','horse','ship','truck']
num_classes=len(classes)
samples_per_class=7

num_training = 5000
mask = range(num_training)  # indices 0..4999
x_train = x_train[mask]  # 5000 x 32 x 32 x 3
y_train = y_train[mask]
num_test = 500
mask = range(num_test)
x_test = x_test[mask]
y_test = y_test[mask]

x_train = np.reshape(x_train,(x_train.shape[0],-1))
x_test = np.reshape(x_test,(x_test.shape[0],-1))

classifier = KNearestNeighbor()
classifier.train(x_train,y_train)


# Compare accuracy
#dists = classifier.compute_distance_two_loops(x_test)
dists = classifier.compute_distance_one_loops(x_test)
y_test_pred = classifier.predict_labels(dists,k=1)
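
# A short follow-up sketch that actually compares accuracy, as the comment above
# suggests (assumes y_test_pred and y_test are 1-D label arrays of length num_test):
num_correct = np.sum(y_test_pred == y_test)
accuracy = float(num_correct) / num_test
print('Got %d / %d correct => accuracy: %f' % (num_correct, num_test, accuracy))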
Example #8
import numpy as np
from data_utils import load_cifar10
import matplotlib.pyplot as plt
from knn import KNearestNeighbor
import sys
sys.path.append('..')
x_train, y_train, x_test, y_test = load_cifar10(
    '..\斯坦福CS231N\cifar-10-batches-py')
# To verify the result, print the shapes:
print('training data shape:', x_train.shape)
print('training labels shape:', y_train.shape)
print('test data shape:', x_test.shape)
print('test labels shape:', y_test.shape)
# There are 50,000 training images and 10,000 test images.
# Randomly pick samples_per_class images from each class of the 50,000 training images and display them:
classes = [
    'plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship',
    'truck'
]
num_classes = len(classes)
samples_per_class = 7
for y, cls in enumerate(classes):
    idxs = np.flatnonzero(y_train == y)
    idxs = np.random.choice(idxs, samples_per_class, replace=False)
    for i, idx in enumerate(idxs):
        plt_idx = i * num_classes + y + 1  # position of this image in the subplot grid
        plt.subplot(samples_per_class, num_classes, plt_idx)  # select the subplot at that position
        plt.imshow(x_train[idx].astype('uint8'))
        plt.axis('off')
        if i == 0:
            plt.title(cls)
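
# Once the grid is filled in, one more call displays it (assumes an interactive
# matplotlib backend):
plt.show()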
Example #9
import argparse
import numpy as np
import matplotlib.pyplot as plt
import data_utils

plt.switch_backend('agg')
parser = argparse.ArgumentParser()
parser.add_argument("--best", help="Train svm classifier with pre-determined parameters and report test set accuracy", action="store_true")
parser.add_argument("--search_c", help="Search for choice of regularization factor", action="store_true")
parser.add_argument("--mean_sub", help="Perform mean subtraction on the features", action="store_true")
parser.add_argument("--scaling", help="Scale features to [0 1] range", action="store_true")
parser.add_argument("--pca", help="Perform PCA on features", action="store_true")
parser.add_argument("--lda", help="Perform LDA on features", action="store_true")

args = parser.parse_args()

# Load dataset
Xtr, Ytr, Xte, Yte = data_utils.load_cifar10('../cifar-10-batches-py')
num_train_samples = Xtr.shape[0]
num_test_samples = Xte.shape[0]

# Unit scaling and mean subtraction
if(args.scaling):
    print('Performing feature scaling to [0 1]')
    Xtr = Xtr/255
    Xte = Xte/255

if(args.mean_sub):
    print('Performing mean subtraction on the samples')
    mean_image = np.mean(Xtr,axis=0)
    Xtr = Xtr - mean_image
    Xte = Xte - mean_image
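
# The --pca flag is parsed above but not handled in this excerpt. A hedged sketch of
# one possible handling using scikit-learn's PCA (an assumption; the original code may
# implement PCA differently, and the 100-component count is only illustrative):
if(args.pca):
    print('Performing PCA on the features')
    from sklearn.decomposition import PCA
    Xtr = Xtr.reshape(num_train_samples, -1)
    Xte = Xte.reshape(num_test_samples, -1)
    pca = PCA(n_components=100)
    Xtr = pca.fit_transform(Xtr)
    Xte = pca.transform(Xte)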
Example #10
from __future__ import absolute_import
import sys
sys.path.append("..")
import random 
import numpy as np
from data_utils import load_cifar10
from SVM19 import *
from softmax import *
from gradient_check import grad_check_sparse
from linear_classifier import LinearSVM
import time

cifar10_dir='cifar-10-batches-py'
x_train,y_train,x_test,y_test=load_cifar10(cifar10_dir)
# Subsample the data
num_training=49000
num_validation=1000
num_test=1000
num_dev=500
mask=range(num_training,num_training+num_validation)
x_val=x_train[mask]
y_val=y_train[mask]
mask=range(num_training)
x_train=x_train[mask]
y_train=y_train[mask]
mask=np.random.choice(num_training,num_dev,replace=False)
x_dev=x_train[mask]
y_dev=y_train[mask]
mask=range(num_test)
x_test=x_test[mask]
y_test=y_test[mask]
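
# A sketch of the usual next step before training the imported LinearSVM: flatten each
# split into row vectors and subtract the training-set mean image (mirrors the
# preprocessing in Example #2):
x_train = np.reshape(x_train, (x_train.shape[0], -1))
x_val = np.reshape(x_val, (x_val.shape[0], -1))
x_test = np.reshape(x_test, (x_test.shape[0], -1))
x_dev = np.reshape(x_dev, (x_dev.shape[0], -1))

mean_image = np.mean(x_train, axis=0)
x_train = x_train - mean_image
x_val = x_val - mean_image
x_test = x_test - mean_image
x_dev = x_dev - mean_image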