Example #1
    def setUp(self):
        # Layer size
        self.n_in = 28 * 28 # Input size
        self.n_h1 = 1000 # Hidden Layer 1 size
        self.n_h2 = 1000 # Hidden Layer 2 size
        self.n_out = 10 # Output size

        # self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
        self.device = torch.device("cpu")
        self.model = model.FullyConnected(self.n_in, self.n_h1, self.n_h2, self.n_out, device=self.device)
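
Each of these snippets builds the network as model.FullyConnected(n_in, n_h1, n_h2, n_out, device=...) and, in the example below, drives it through net.train(inputs, labels, lr), so the class is a hand-rolled wrapper rather than a plain torch.nn.Module. The following is only a minimal sketch of what such an interface could look like; the layer structure, loss function, and manual SGD step are assumptions made for illustration, not the project's actual implementation.

import torch
import torch.nn as nn
import torch.nn.functional as F


class FullyConnected:
    """Illustrative sketch only; the real model.FullyConnected is project-specific."""

    def __init__(self, n_in, n_h1, n_h2, n_out, device=torch.device("cpu")):
        self.device = device
        self.net = nn.Sequential(
            nn.Linear(n_in, n_h1), nn.ReLU(),
            nn.Linear(n_h1, n_h2), nn.ReLU(),
            nn.Linear(n_h2, n_out),
        ).to(device)

    def train(self, inputs, labels, lr):
        # One manual SGD step on a single mini-batch.
        inputs, labels = inputs.to(self.device), labels.to(self.device)
        loss = F.cross_entropy(self.net(inputs), labels)
        self.net.zero_grad()
        loss.backward()
        with torch.no_grad():
            for p in self.net.parameters():
                p -= lr * p.grad
        return loss.item()

    def eval(self, inputs, labels):
        # Batch accuracy, computed without gradient tracking.
        inputs, labels = inputs.to(self.device), labels.to(self.device)
        with torch.no_grad():
            preds = self.net(inputs).argmax(dim=1)
        return (preds == labels).float().mean().item()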
Example #2
                                          shuffle=False)

# NOTE: Don't change these settings
device = "cuda:0" if torch.cuda.is_available() else "cpu"

# NOTE: Don't change these settings
# Layer size
N_in = 28 * 28  # Input size
N_h1 = 256  # Hidden Layer 1 size
N_h2 = 256  # Hidden Layer 2 size
N_out = 10  # Output size
# Learning rate
lr = 0.001

# init model
net = model.FullyConnected(N_in, N_h1, N_h2, N_out, device=device)

# TODO: Define number of epochs
N_epoch = 7  # Or keep it as is

# TODO: Training and Validation Loop
# >>> for n epochs
## >>> for all mini batches
### >>> net.train(...)
## at the end of each training epoch
## >>> net.eval(...)

for epoch in range(N_epoch):
    for batch_idx, (inputs, labels) in enumerate(train_loader):
        inputs = inputs.view(-1, N_in)
        net.train(inputs, labels, lr)
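
The TODO scaffold above also calls for a net.eval(...) pass at the end of each training epoch, which the loop as posted leaves out. One way to complete it, assuming a separate val_loader exists (a hypothetical name) and that net.eval(inputs, labels) mirrors net.train(...) without updating the weights:

for epoch in range(N_epoch):
    # Training pass over all mini-batches
    for inputs, labels in train_loader:
        inputs = inputs.view(-1, N_in)
        net.train(inputs, labels, lr)

    # End-of-epoch validation pass (val_loader is an assumed, hypothetical loader)
    for inputs, labels in val_loader:
        inputs = inputs.view(-1, N_in)
        net.eval(inputs, labels)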
Example #3

""" Bathc len code does not work with enumerate
count=1
for i in train_data_loader:
    print(i[1].size())
    batch_len=list(i[1].size())
    print(batch_len[0])
    image_printer_elemental(i[0],i[1],batch_len[0])
    if count == 5:
        break
    count=count+1

"""

net = model.FullyConnected(28 * 28, 256, 256, 10)

EPOCHS = 1
""" TO view images and test the loops
for i in range(EPOCHS):
    for batch_idx,(data,label) in enumerate(train_data_loader):
        for img_tensor,label_tensor in zip(data,label):
            image_printer_elemental(img_tensor,label_tensor)
        break     
"""
# small tester block
"""
for i in range(EPOCHS):
    for batch_idx,(inputs,label) in enumerate(train_data_loader): 
        #print("Inputs shape",inputs.size())
        inputs_size_arr=list(inputs.size())