Exemplo n.º 1
0
def run_mnist(n_train=None, n_test=None, model_type="normal"):
    """Run the PCANet MNIST experiment with a fixed hyper-parameter set.

    Args:
        n_train: optional cap on the number of training samples (None = all).
        n_test: optional cap on the number of test samples (None = all).
        model_type: model variant name forwarded verbatim to ``run``.
    """
    dataset = "MNIST"
    data_path = '/git/data'
    batch_size = 1

    # Subset sizes consumed downstream by run().
    datasize = {"n_train": n_train, "n_test": n_test}

    # Two PCA-filter convolution stages followed by a pooling stage.
    transformer_params = dict(
        image_shape=28,
        filter_shape_l1=5, step_shape_l1=1, n_l1_output=8,
        filter_shape_l2=5, step_shape_l2=1, n_l2_output=4,
        filter_shape_pooling=5, step_shape_pooling=5,
    )
    ensemble_params = dict(
        n_estimators=40,
        sampling_ratio=0.03,
        n_jobs=-1,
    )

    # NOTE(review): the returned loaders are never used here; the call is
    # kept for its side effects (presumably downloading/caching the dataset
    # under data_path — confirm against data_loading's implementation).
    train_loader, test_loader = data_loading(data_path, dataset, batch_size)

    run(dataset, datasize, transformer_params, ensemble_params, model_type)
Exemplo n.º 2
0
            # NOTE(review): fragment — the enclosing function's header and
            # the start of this loop are outside this excerpt.
            i += 1
        else:
            # Accumulate test batches: tensors are converted to numpy and
            # axes 1 and 3 are swapped before concatenation (moves the
            # channel axis assuming NCHW loader output — TODO confirm).
            images_test = np.concatenate(
                (images_test, np.swapaxes(images.numpy(), 3, 1)), axis=0)
            y_test = np.concatenate((y_test, labels.numpy()), axis=0)
    #images_test, y_test = test_set

    # Extract PCANet features for the accumulated test images and predict
    # labels with the previously trained classifier.
    X_test = pcanet.transform(images_test)
    y_pred = classifier.predict(X_test)
    # Returns (predicted labels, ground-truth labels) for evaluation.
    return y_pred, y_test


if __name__ == "__main__":
    # NOTE(review): `args` is parsed elsewhere in the file (not visible in
    # this excerpt); this block is also truncated — the "test" branch below
    # loads the models but the evaluation call is cut off.
    DataPath = '/git/data'

    train_loader, test_loader = data_loading(DataPath, args.dataset,
                                             args.batchSize)

    # Pin computation to the requested GPU when one is specified (>= 0).
    if args.gpu >= 0:
        torch.cuda.set_device(args.gpu)

    if args.mode == "train":
        print("Training the model...")
        pcanet, classifier = train(train_loader, args.dataset, args.Numlayers)
        # Persist both the feature extractor and the classifier,
        # with filenames keyed by dataset name.
        save_model(pcanet, args.dataset + "_pcanet.pkl")
        save_model(classifier, args.dataset + "_classifier.pkl")
        print("Model saved")

    elif args.mode == "test":
        # Reload the previously trained components for evaluation.
        pcanet = load_model(args.dataset + "_pcanet.pkl")
        classifier = load_model(args.dataset + "_classifier.pkl")
Exemplo n.º 3
0
def load_mnist(DataPath, dataset, batch_size):
    """Thin wrapper around ``data_loading``.

    Returns the ``(train_loader, test_loader)`` pair produced by
    ``data_loading`` unchanged.
    """
    return data_loading(DataPath, dataset, batch_size)
Exemplo n.º 4
0
        # NOTE(review): fragment — the enclosing function's header and the
        # preceding dataset branch (presumably MNIST, given channels=1) are
        # outside this excerpt.
        batch_size = 250
        num_classes = 10
        num_epochs = 1
        learning_rate = 0.01
        channels = 1

    # CIFAR10 hyper-parameters.
    # NOTE(review): input_size is 28*28 here although CIFAR10 images are
    # 32x32 — looks like a copy-paste from the MNIST branch; verify whether
    # input_size is actually consumed downstream.
    if datasets == 'CIFAR10':
        input_size = 28 * 28
        hidden_size = 400
        batch_size = 250
        num_classes = 10
        num_epochs = 1
        channels = 3
        learning_rate = 0.01

    [train_loader, test_loader] = data_loading(DataPath, datasets, batch_size)
    net1 = ConvNet1(channels, hidden_size)
    #     values=[0.3 ,  0.4 , 0.5 ,  0.6 ,  0.7]

    #     values =np.arange(0.9,0.5,-0.1)
    #     values=[0.06,0.08,0.1,0.12,0.14]
    #     start=0.65
    #values =[ 0.5, 0.4, 0.3,0.2,0.1]
    #start=0.8
    #     values =[0.3, 0.35, 0.4, 0.45,0.5,0.55]

    # Collected error rates for the sweep below.
    Error = []
    #for p in range(len(values)):

    # Paired sweep parameters; exact semantics depend on the loop that
    # follows this excerpt (presumably pruning start/step fractions, judging
    # by the commented-out experiments above — TODO confirm).
    start = [0.95, 0.9, 0.85, 0.8, 0.75, 0.7, 0.65]
    values = [0.69, 0.59, 0.49, 0.39, 0.29, 0.19, 0.09]
Exemplo n.º 5
0
def load_cifar():
    """Load CIFAR10 with batch size 1 and return the reshaped datasets.

    Relies on the module-level ``DataPath`` constant; forwards the train and
    test loaders to ``reshape_dataset`` and returns its result.
    """
    train_set, test_set = data_loading(DataPath, 'CIFAR10', 1)
    return reshape_dataset(train_set, test_set)