import torch.nn as nn
from torch.nn.utils import weight_norm

import helpers  # project-local module providing Flatten / Reshape


def __init__(self, num_classes):
    # Conv discriminator for 3-channel images, with weight normalization on
    # every parameterized layer.
    super(Discriminator, self).__init__()
    self.net = nn.Sequential(
        nn.Dropout(.2),
        # For a 32x32 input: three 3x3 convs at 96 channels, the last with stride 2 -> 16x16
        weight_norm(nn.Conv2d(3, 96, 3, stride=1, padding=1)),
        nn.LeakyReLU(),
        weight_norm(nn.Conv2d(96, 96, 3, stride=1, padding=1)),
        nn.LeakyReLU(),
        weight_norm(nn.Conv2d(96, 96, 3, stride=2, padding=1)),
        nn.LeakyReLU(),
        nn.Dropout(.5),
        # Three 3x3 convs at 192 channels, the last with stride 2 -> 8x8
        weight_norm(nn.Conv2d(96, 192, 3, stride=1, padding=1)),
        nn.LeakyReLU(),
        weight_norm(nn.Conv2d(192, 192, 3, stride=1, padding=1)),
        nn.LeakyReLU(),
        weight_norm(nn.Conv2d(192, 192, 3, stride=2, padding=1)),
        nn.LeakyReLU(),
        nn.Dropout(.5),
        # Unpadded 3x3 conv -> 6x6, then two 1x1 convs (network-in-network style)
        weight_norm(nn.Conv2d(192, 192, 3, stride=1, padding=0)),
        nn.LeakyReLU(),
        weight_norm(nn.Conv2d(192, 192, 1, stride=1, padding=0)),
        nn.LeakyReLU(),
        weight_norm(nn.Conv2d(192, 192, 1, stride=1, padding=0)),
        nn.LeakyReLU(),
        # Global average pooling over the remaining spatial map
        # nn.AvgPool2d(6, stride=1),
        nn.AdaptiveAvgPool2d(1),
        helpers.Flatten())
    self.fc = weight_norm(nn.Linear(192, num_classes))
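
# Illustrative shape check, not part of the original model file. Assumptions:
# the enclosing Discriminator class subclasses nn.Module (as the super() call
# implies) and inputs are 3x32x32 images; self.net should then reduce a batch
# to (N, 192) features and self.fc map them to num_classes logits.
def _check_conv_discriminator_rgb():  # hypothetical helper name
    import torch
    d = Discriminator(num_classes=10)   # hypothetical instantiation
    x = torch.randn(8, 3, 32, 32)
    feats = d.net(x)                    # expected (8, 192)
    logits = d.fc(feats)                # expected (8, 10)
    print(feats.shape, logits.shape)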
def __init__(self, latent_dim):
    # MLP generator: latent vector -> single-channel 32x32 image in [-1, 1].
    super(Generator, self).__init__()
    self.net = nn.Sequential(
        helpers.Flatten(),
        nn.Linear(latent_dim, 500),
        nn.Softplus(),
        nn.BatchNorm1d(500),
        nn.Linear(500, 500),
        nn.Softplus(),
        nn.BatchNorm1d(500),
        nn.Linear(500, 32 * 32),
        nn.Tanh(),
        helpers.Reshape((1, 32, 32)))
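
# Illustrative usage sketch, not part of the original file. Assumptions: the
# enclosing Generator class subclasses nn.Module (as the super() call implies)
# and helpers.Reshape((1, 32, 32)) views the (N, 1024) output as (N, 1, 32, 32).
def _sample_fake_images(latent_dim=100, batch_size=16):  # hypothetical helper name
    import torch
    g = Generator(latent_dim)               # hypothetical instantiation
    z = torch.randn(batch_size, latent_dim)
    fake = g.net(z)                          # expected (16, 1, 32, 32), in [-1, 1] from Tanh
    return fake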
def __init__(self, num_classes):
    # Conv discriminator for single-channel inputs (matching the generator's
    # (1, 32, 32) output). Three stride-2 convolutions reduce 32 -> 16 -> 8 -> 4
    # before the 4x4 max pool.
    super(Discriminator, self).__init__()
    self.net = nn.Sequential(
        nn.Conv2d(1, 64, 3, stride=1, padding=1),
        nn.LeakyReLU(),
        nn.Conv2d(64, 96, 3, stride=2, padding=1),
        nn.LeakyReLU(),
        nn.Dropout(.2),
        nn.Conv2d(96, 96, 3, stride=1, padding=1),
        nn.LeakyReLU(),
        nn.Conv2d(96, 192, 3, stride=2, padding=1),
        nn.LeakyReLU(),
        nn.Dropout(.2),
        nn.Conv2d(192, 192, 3, stride=2, padding=1),
        nn.LeakyReLU(),
        nn.Conv2d(192, 192, 1, stride=1, padding=0),
        nn.LeakyReLU(),
        nn.Conv2d(192, 192, 1, stride=1, padding=0),
        nn.LeakyReLU(),
        nn.MaxPool2d(4, stride=1),
        helpers.Flatten())
    self.fc = nn.Linear(192, num_classes)
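
# Illustrative shape check, not part of the original file. Assumptions: the
# enclosing Discriminator subclasses nn.Module and consumes (1, 32, 32) images
# like those the generator above produces; with the stride-2 reductions
# 32 -> 16 -> 8 -> 4 and MaxPool2d(4) leaving a 1x1 map, helpers.Flatten()
# should yield (N, 192).
def _check_conv_discriminator_gray():  # hypothetical helper name
    import torch
    d = Discriminator(num_classes=10)   # hypothetical instantiation
    x = torch.randn(8, 1, 32, 32)
    logits = d.fc(d.net(x))             # expected (8, 192) -> (8, 10)
    print(logits.shape)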
def __init__(self, num_classes):
    super(Discriminator, self).__init__()
    self.net = nn.Sequential(
        helpers.Flatten(),
        nn.Linear(32 * 32, 1000),
        nn.Dropout(.1),
        nn.ReLU(),
        nn.Linear(1000, 500),
        nn.Dropout(.1),
        nn.ReLU(),
        nn.Linear(500, 250),
        nn.Dropout(.1),
        nn.ReLU(),
        nn.Linear(250, 250),
        nn.Dropout(.1),
        nn.ReLU(),
        nn.Linear(250, 250),
        nn.Dropout(.1),
        nn.ReLU(),
        nn.Linear(250, num_classes),
    )
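
# Illustrative usage sketch, not part of the original file. Assumption: the
# enclosing Discriminator subclasses nn.Module. Unlike the conv variants, this
# MLP has no separate self.fc head; the final Linear(250, num_classes) inside
# self.net already produces the logits.
def _check_mlp_discriminator():  # hypothetical helper name
    import torch
    d = Discriminator(num_classes=10)   # hypothetical instantiation
    x = torch.randn(8, 1, 32, 32)       # flattened internally to (8, 1024)
    logits = d.net(x)                   # expected (8, 10)
    print(logits.shape)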