def __init__(self):
    super(Cifar10ConvNet, self).__init__()
    self.conv1 = pytk.Conv2d(3, 32, 3, padding=1)
    self.conv2 = pytk.Conv2d(32, 64, 3, padding=1)
    self.conv3 = pytk.Conv2d(64, 128, 3, padding=1)
    self.fc1 = pytk.Linear(4 * 4 * 128, 512)
    self.out = pytk.Linear(512, NUM_CLASSES)
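# A minimal sketch of the forward pass implied by the layer sizes above; it is an
# assumption, not necessarily the author's code. With padding=1 convolutions
# preserving the 32x32 CIFAR-10 input and a 2x2 max pool after each conv block,
# the spatial size shrinks 32 -> 16 -> 8 -> 4, which matches the 4 * 4 * 128
# input of fc1. (Assumes `import torch.nn.functional as F` at the module top.)
def forward(self, x):
    x = F.max_pool2d(F.relu(self.conv1(x)), 2)   # 3 x 32 x 32  -> 32 x 16 x 16
    x = F.max_pool2d(F.relu(self.conv2(x)), 2)   # 32 x 16 x 16 -> 64 x 8 x 8
    x = F.max_pool2d(F.relu(self.conv3(x)), 2)   # 64 x 8 x 8   -> 128 x 4 x 4
    x = x.view(-1, 4 * 4 * 128)                  # flatten to 2048 features
    x = F.relu(self.fc1(x))
    return self.out(x)                           # raw logits for NUM_CLASSES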
def __init__(self):
    super(MNISTConvNet2, self).__init__()
    self.convNet = nn.Sequential(
        pytk.Conv2d(1, 128, kernel_size=3),
        nn.ReLU(),
        nn.MaxPool2d(2),
        nn.Dropout(p=0.20),
        pytk.Conv2d(128, 64, kernel_size=3),
        nn.ReLU(),
        nn.MaxPool2d(2),
        nn.Dropout(p=0.10),
        nn.Flatten(),
        pytk.Linear(7 * 7 * 64, 512),
        nn.ReLU(),
        nn.Dropout(p=0.20),
        pytk.Linear(512, NUM_CLASSES)
    )
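# Because the whole network is wrapped in an nn.Sequential, the forward pass can
# be a one-liner; the sketch below is an assumption based on the layers above.
# The 7 * 7 * 64 flatten size works out if pytk.Conv2d pads to preserve the
# 28x28 MNIST input ("same" padding), so each MaxPool2d(2) halves it: 28 -> 14 -> 7.
def forward(self, x):
    return self.convNet(x)   # (N, 1, 28, 28) -> (N, NUM_CLASSES) logits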
def __init__(self, lr):
    super(MNISTModel, self).__init__()
    self.convNet = nn.Sequential(
        pytk.Conv2d(1, 128, kernel_size=3),
        nn.ReLU(),
        nn.MaxPool2d(2),
        nn.Dropout(p=0.20),
        pytk.Conv2d(128, 64, kernel_size=3),
        nn.ReLU(),
        nn.MaxPool2d(2),
        nn.Dropout(p=0.10),
        nn.Flatten(),
        pytk.Linear(7 * 7 * 64, 512),
        nn.ReLU(),
        nn.Dropout(p=0.20),
        pytk.Linear(512, NUM_CLASSES)
    )
    self.lr = lr
    self.loss_fn = nn.CrossEntropyLoss()
    self.train_acc = tm.Accuracy()
    self.val_acc = tm.Accuracy()
    self.train_batch_losses = []
    self.val_batch_losses = []
    self.train_batch_accs = []
    self.val_batch_accs = []
    self.history = {"loss": [], "acc": [], "val_loss": [], "val_acc": []}
    self.log_file = open(os.path.join(os.getcwd(), 'mnist_log.txt'), 'w')
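# A hedged sketch of how this module might be wired up, assuming MNISTModel is a
# pytorch_lightning.LightningModule and `tm` is torchmetrics. The optimizer choice
# (Adam) and the batch-level bookkeeping are assumptions, not the author's code;
# they only use attributes already declared in __init__ above.
def forward(self, x):
    return self.convNet(x)

def training_step(self, batch, batch_idx):
    x, y = batch
    logits = self.forward(x)
    loss = self.loss_fn(logits, y)                      # CrossEntropyLoss on raw logits
    acc = self.train_acc(torch.argmax(logits, dim=1), y)
    self.train_batch_losses.append(loss.item())         # accumulated for epoch-level history
    self.train_batch_accs.append(acc.item())
    return loss

def configure_optimizers(self):
    return torch.optim.Adam(self.parameters(), lr=self.lr)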
def __init__(self):
    super(MNISTConvNet, self).__init__()
    self.conv1 = pytk.Conv2d(1, 128, kernel_size=3)
    self.conv2 = pytk.Conv2d(128, 64, kernel_size=3)
    self.fc1 = pytk.Linear(7 * 7 * 64, 512)
    self.out = pytk.Linear(512, NUM_CLASSES)
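# A minimal sketch of the forward pass implied by the layer sizes above; it is an
# assumption, not necessarily the author's code. The 7 * 7 * 64 input of fc1
# works out if pytk.Conv2d pads to preserve the 28x28 MNIST input ("same"
# padding) and each conv is followed by a 2x2 max pool: 28 -> 14 -> 7.
# (Assumes `import torch.nn.functional as F` at the module top.)
def forward(self, x):
    x = F.max_pool2d(F.relu(self.conv1(x)), 2)   # 1 x 28 x 28   -> 128 x 14 x 14
    x = F.max_pool2d(F.relu(self.conv2(x)), 2)   # 128 x 14 x 14 -> 64 x 7 x 7
    x = x.view(-1, 7 * 7 * 64)                   # flatten to 3136 features
    x = F.relu(self.fc1(x))
    return self.out(x)                           # raw logits for NUM_CLASSES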