Example #1
# Echo the current run configuration.
print(name)
print(max_iter)
#sys.exit()  # uncomment to stop after printing the configuration

CV_SPLITS = 10

# LOAD DATA
# Alternative datasets, kept commented out for reference:
#dsBunch = ds.load('iris')
#data = train_test_split(dsBunch.data, dsBunch.target, test_size=0.25, random_state=1)

# dsBunch = ds.load_mnist_back()
# dsTest = ds.load_mnist_back_test()
# data = (dsBunch.data, dsTest.data, dsBunch.target, dsTest.target)

# Active dataset: MNIST, with training restricted to the first 10,000 samples.
X_train, y_train = ds.load_mnist('data')
X_test, y_test = ds.load_mnist('data', kind='t10k')
data = (X_train[:10000], X_test, y_train[:10000], y_test)

# Optional: inspect the loaded data.
#n_features = dsBunch.data.shape[1]
#shp = dsBunch.data.shape
#print('DATA:')
#print(pd.DataFrame(dsTest.data).head())
#print()
#print(pd.DataFrame(dsTest.target).head())
#print('n_features: {}\nshape: {}\n'.format(n_features, shp))

# DEFINE PARAM GRIDS
n_features = X_train.shape[1]
d_features = n_features // 2
hls = [(d_features,)*3, (n_features,)*3, (d_features,)*2, (n_features,)*2, (d_features,), (n_features,)]
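A minimal sketch of how this grid might feed a cross-validated search, assuming scikit-learn's MLPClassifier and GridSearchCV; the param_grid keys and alpha values below are illustrative assumptions, not taken from the original script:

from sklearn.model_selection import GridSearchCV
from sklearn.neural_network import MLPClassifier

param_grid = {
    'hidden_layer_sizes': hls,   # grid defined above
    'alpha': [1e-4, 1e-3],       # illustrative L2 penalties (assumption)
}

search = GridSearchCV(
    MLPClassifier(max_iter=max_iter, random_state=1),
    param_grid,
    cv=CV_SPLITS,
    n_jobs=-1,
)
search.fit(data[0], data[2])     # train on the 10,000-sample MNIST subset
print(search.best_params_, search.best_score_)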
Example #2
#############################
### Preliminaries
#############################

# Retrieve the arguments from the command-line
args = parseArgs()

# Fix the seed for the random generator
np.random.seed(seed=0)

#############################
### Dataset Handling
#############################

### Load the dataset
train_set, valid_set, test_set = dataset_loader.load_mnist()

### Define the dataset variables
n_training = train_set[0].shape[0]   # number of training examples
n_feature = train_set[0].shape[1]    # input dimensionality
n_label = np.max(train_set[1]) + 1   # number of classes (labels assumed 0..K-1)

#############################
### Neural Network parameters
#############################

### Activation function
act_func_name = args.act_func

### Network Architecture: input layer, hidden layer sizes from the command line, output layer
nn_arch = np.array([n_feature] + args.arch + [n_label])
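As an illustration of how nn_arch pairs consecutive layer sizes, here is a minimal parameter-initialization sketch; the small-random-weights scheme is an assumption, not the script's actual initializer:

### Illustrative only: consecutive entries of nn_arch give each layer's (n_in, n_out)
weights = [np.random.randn(n_in, n_out) * 0.01
           for n_in, n_out in zip(nn_arch[:-1], nn_arch[1:])]
biases = [np.zeros(n_out) for n_out in nn_arch[1:]]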
Example #3
            *convblock(128, 256, 4, 2, 1),
            nn.Conv2d(256, 1, 4, 1, 0, bias=False), # 'fully connected' output implemented as a 4x4 convolution
            nn.Sigmoid()
        )

    def forward(self, img):
        prob = self.model(img)
        return prob


assert (opt.dataset == 'cifar10' or opt.dataset == 'mnist'), 'Unknown dataset! Only cifar10 and mnist are supported.'

if opt.dataset == 'cifar10':
    batch_iterator = DataLoader(load_cifar10(opt.img_size), shuffle=True, batch_size=opt.batch_size) # List, NCHW format.
elif opt.dataset == 'mnist':
    batch_iterator = DataLoader(load_mnist(opt.img_size), shuffle=True, batch_size=opt.batch_size) # List, NCHW format.

# Save a batch of real images for reference.
os.makedirs('./out', exist_ok=True)
save_image(next(iter(batch_iterator))[0][:25, ...], './out/real_samples.png', nrow=5, normalize=True)


cuda = torch.cuda.is_available()
Tensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor  # device-agnostic tensor alias
gan_loss = nn.BCELoss()

generator = Generator()
discriminator = Discriminator()
if cuda:
    generator.cuda()
    discriminator.cuda()
    gan_loss.cuda()

optimizer_D = optim.RMSprop(discriminator.parameters(), lr=opt.lr)
optimizer_G = optim.RMSprop(generator.parameters(), lr=opt.lr)
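The training loop itself is not shown here; the following one-step sketch shows how these pieces could fit together, assuming the (unshown) Generator maps noise of shape (N, opt.latent_dim, 1, 1) to images and that opt exposes a latent_dim flag (both are assumptions, not taken from the original):

# Sketch of a single training iteration under the assumptions above.
for batch in batch_iterator:
    real = batch[0].type(Tensor)
    valid = Tensor(real.size(0), 1).fill_(1.0)   # targets for real images
    fake = Tensor(real.size(0), 1).fill_(0.0)    # targets for generated images

    # Generator step: try to fool the discriminator.
    optimizer_G.zero_grad()
    z = Tensor(real.size(0), opt.latent_dim, 1, 1).normal_(0, 1)
    gen_imgs = generator(z)
    g_loss = gan_loss(discriminator(gen_imgs).view(-1, 1), valid)
    g_loss.backward()
    optimizer_G.step()

    # Discriminator step: separate real from generated images.
    optimizer_D.zero_grad()
    d_loss = (gan_loss(discriminator(real).view(-1, 1), valid) +
              gan_loss(discriminator(gen_imgs.detach()).view(-1, 1), fake)) / 2
    d_loss.backward()
    optimizer_D.step()
    break  # one illustrative iteration only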