Code example #1
0
File: train2.py — Project: chriscremer/Other_Code
    write_to_file = exp_dir + 'exp_stdout.txt'
###########################################################################################

###########################################################################################
# LOAD DATA

# Select the training dataset based on the CLI flag.
# NOTE(review): if args.dataset is neither 'clevr' nor 'cifar', `dataset` is
# never bound and the print below raises NameError — confirm the argparse
# choices restrict it to these two values.
if args.dataset == 'clevr':
    # # CLEVR DATA
    dataset = load_clevr(batch_size=args.batch_size,
                         vws=args.vws,
                         quick=args.quick)

elif args.dataset == 'cifar':
    # CIFAR DATA
    # train_image_dataset = load_cifar(data_dir=args.data_dir)
    dataset = load_cifar(data_dir=home + '/Documents/')

# dataset[0] is indexed for its .shape below, so dataset is presumably a
# sequence/array of image tensors — TODO confirm against load_clevr/load_cifar.
print(len(dataset), dataset[0].shape)
###########################################################################################

###########################################################################################
# Init Model
# ------------------------------------------------------------------------------
sampling_batch_size = 64
# `shape` is assumed to be (channels, height, width), given how it is unpacked
# into the Glow_ input spec below — TODO confirm.
shape = dataset[0].shape
model = Glow_((sampling_batch_size, shape[0], shape[1], shape[2]), args).cuda()
# print(model)
print("number of model parameters:",
      sum([np.prod(p.size()) for p in model.parameters()]))
# model = nn.DataParallel(model).cuda()
Code example #2
0
File: train9.py — Project: chriscremer/Other_Code
    # # CLEVR DATA
    # Pick the data directory by host: the 'vws'/'vector'/'vaughn' cluster
    # machines use vl_data; anything else uses the boltz path (per the inline
    # comments below).
    if args.machine in ['vws', 'vector', 'vaughn']:
        data_dir = home + "/vl_data/two_objects_large/"  #vws
    else:
        data_dir = home + "/VL/data/two_objects_no_occ/"  #boltz

    train_x, test_x = load_clevr(batch_size=args.batch_size,
                                 data_dir=data_dir,
                                 quick=args.quick)
    # Per-sample shape; presumably consumed by model init elsewhere in the
    # file — not visible in this fragment.
    shape = train_x[0].shape

elif args.dataset == 'cifar':
    # CIFAR DATA
    # train_image_dataset = load_cifar(data_dir=args.data_dir)
    train_x, test_x = load_cifar(data_dir=home + '/Documents/',
                                 dataset_size=args.dataset_size)
    shape = train_x[0].shape

    # print (len(test_x), 'test set len')
    # SVHN is loaded alongside CIFAR, presumably as an out-of-distribution
    # evaluation set — TODO confirm where svhn_test_x is consumed.
    svhn_test_x = load_svhn(data_dir=home + '/Documents/')
    # svhn_test_x = test_x

# dataset = train_x
elif args.dataset == 'flickr':

    # Flickr-Faces: train and test images live in separate hard-coded
    # scratch directories.
    train_x, test_x = load_flickr(
        data_dir='/scratch/gobi1/ccremer/Flickr_Faces/images1024x1024/',
        data_dir_test=
        '/scratch/gobi1/ccremer/Flickr_Faces/images1024x1024_test/',
        dataset_size=args.dataset_size)
Code example #3
0
File: main.py — Project: pinouche/crossover
    else:
        # Fallback crossover strategy: average the parents' weights.
        # NOTE(review): every argument comes from the enclosing function's
        # scope, whose header is outside this fragment — semantics assumed
        # from names only.
        result_list = average_weights_crossover(crossover, data, x_train,
                                                y_train, x_test, y_test,
                                                num_transplants,
                                                batch_size_activation,
                                                batch_size_sgd, work_id)

    return result_list


if __name__ == "__main__":

    # Dataset selector: "cifar10", "cifar100", or "mnist".
    data = "cifar10"

    if data == "cifar10":
        x_train, x_test, y_train, y_test = load_cifar()
    elif data == "cifar100":
        x_train, x_test, y_train, y_test = load_cifar_100()
    elif data == "mnist":
        x_train, x_test, y_train, y_test = load_mnist()
    # NOTE(review): any other value of `data` leaves the arrays unbound and
    # crashes below with NameError — consider raising ValueError explicitly.

    num_processes = 1

    start = timer()  # NOTE(review): `start` is never read in this fragment

    # One pair id per worker process. The original built this with a
    # do-nothing comprehension; list(range(n)) is the idiomatic form.
    pair_list = list(range(num_processes))

    results = crossover_offspring(data, x_train, y_train, x_test, y_test,
                                  pair_list)

    # Use a context manager so the file handle is flushed and closed even if
    # pickling raises (the original leaked the handle from a bare open()).
    with open("crossover_results.pickle", "wb") as f:
        pickle.dump(results, f)
Code example #4
0
File: main.py — Project: aymene98/auto-CV
# Train and evaluate on MNIST first.
print("**************** Training on MNIST *****************")
model = Model()
model.train(x_train_mnist, y_train_mnist)
_ = model.test(x_test_mnist, y_test_mnist)

# Release the MNIST arrays and the fitted model so the next dataset fits in RAM.
del x_train_mnist, y_train_mnist, x_test_mnist, y_test_mnist, model

# Brief pause before the next heavy allocation.
time.sleep(5)

print("Loading CIFAR-100 ...")
x_train_cifar, y_train_cifar, x_test_cifar, y_test_cifar = load_cifar()
print("Done")

# Repeat the same train/evaluate cycle on CIFAR-100.
print("**************** Training on CIFAR-100 *****************")
model = Model()
model.train(x_train_cifar, y_train_cifar)
_ = model.test(x_test_cifar, y_test_cifar)

# Release the CIFAR arrays and model as well.
del x_train_cifar, y_train_cifar, x_test_cifar, y_test_cifar, model
Code example #5
0
# -*- coding: utf-8 -*-
from __future__ import division

import tensorflow as tf
import numpy as np
import os
import shutil
import time

import load_data

# Load CIFAR splits; as_image=True keeps spatial layout, scaling normalizes.
x_train, x_validation, x_test, y_train, y_validation, y_test \
    = load_data.load_cifar('./data/cifar/', seed=0, as_image=True, scaling=True)

BOARD_PATH = "./board/lab08-10_board"
# NOTE(review): with as_image=True, np.size(x_train, 1) is a spatial dimension
# rather than a flattened feature count — confirm INPUT_DIM is intended (it
# duplicates image_width below).
INPUT_DIM = np.size(x_train, 1)
NCLASS = len(np.unique(y_train))
BATCH_SIZE = 32

TOTAL_EPOCH = 100
ALPHA = 0  # presumably a regularization weight — TODO confirm where it is used
INIT_LEARNING_RATE = 0.001

# Split sizes.
ntrain = len(x_train)
nvalidation = len(x_validation)
ntest = len(x_test)

# Per-image geometry: axes 1..3, presumably (N, H, W, C) — TODO confirm layout.
image_width = np.size(x_train, 1)
image_height = np.size(x_train, 2)
n_channels = np.size(x_train, 3)
Code example #6
0
import load_data
import plot_helper

# Load CIFAR as images and plot a grid of the first 100 training samples.
(x_train, x_validation, x_test,
 y_train, y_validation, y_test) = load_data.load_cifar(
    './data/cifar/', seed=0, as_image=True, scaling=True)
plot_helper.plot_cifar(x_train, 100)

# Same for MNIST.
# NOTE(review): the directory is './data/cmnist/' (not './data/mnist/') —
# preserved as-is; confirm the path is intentional.
(x_train, x_validation, x_test,
 y_train, y_validation, y_test) = load_data.load_mnist(
    './data/cmnist/', seed=0, as_image=True, scaling=True)
plot_helper.plot_mnist(x_train, 100)