# NOTE(review): whitespace-mangled fragment — the original newlines/indentation
# were lost, so this one physical line holds many statements of a Keras
# training script. Visible logic: add a softmax output activation, compile the
# model with SGD (lr=0.01, decay=1e-6, Nesterov momentum 0.9) and categorical
# cross-entropy, then run a manual 10-epoch loop: accumulate per-batch loss and
# accuracy over db.generate_batches(..., dset='train') via
# model.train_on_batch(X, y, accuracy=True), then start a validation pass with
# model.test_on_batch on dset='val'.
# NOTE(review): the `try:` has no visible `except`/`finally`, and the
# validation loop body is cut off mid-statement — the rest of this chunk lies
# outside the visible file. Do not reformat without recovering the original
# line structure from source control.
# NOTE(review): `SGD(lr=...)` and the `accuracy=True` kwarg on
# train_on_batch/test_on_batch are legacy Keras (0.x/1.x) API — presumably this
# targets an old Keras version; confirm before porting to keras 2+/tf.keras.
# assumes db.generate_batches yields (X, y) with y one-hot encoded — TODO confirm
model.add(Activation('softmax')) # Learning rule optimizer = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) model.compile(loss='categorical_crossentropy', optimizer=optimizer) # Training print('Start training...') nb_epochs = 10 batch_size = 100 try: for epoch in range(nb_epochs): print('Epoch', epoch + 1, '/', nb_epochs) tstart = time() # Training batch_generator = db.generate_batches(batch_size, dset='train') nb_train_batches = 0 train_loss = 0.0 train_accuracy = 0.0 for X, y in batch_generator: loss, accuracy = model.train_on_batch(X, y, accuracy=True) train_loss += loss train_accuracy += accuracy nb_train_batches += 1 # Validation batch_generator = db.generate_batches(batch_size, dset='val') nb_val_batches = 0 val_loss = 0.0 val_accuracy = 0.0 for X, y in batch_generator: loss, accuracy = model.test_on_batch(X, y, accuracy=True)
# NOTE(review): whitespace-mangled fragment of a PyTorch evaluation script —
# original line structure lost. Visible logic: open "testRecognizerLogs.txt"
# for append (never closed in this view — presumably closed later; prefer
# `with open(...)` when reformatting), check for a saved model file
# "recognizer.pt" via pathlib, torch.load it, move it to GPU if available,
# then iterate test batches from db.generate_batches(batch_size=100,
# dset='test', rest=False): convert inputs to tensors, convert one-hot label
# rows to integer class indices with `labels.tolist().index(1)`, wrap both in
# (deprecated) autograd Variable, run a forward pass, and take argmax over
# dim 1 as predictions.
# NOTE(review): the extent of the `if myNetwork.is_file():` body is ambiguous
# without the original indentation — it is unclear whether the test loop runs
# when no saved model exists. The chunk is also cut off before any accuracy
# computation/logging; do not reconstruct without the original file.
# NOTE(review): torch.load on an untrusted file unpickles arbitrary code —
# only load checkpoints you produced yourself.
testAcc = [] testLogs = open("testRecognizerLogs.txt", "a") # Check if a previously saved model exists myNetwork = Path("recognizer.pt") if myNetwork.is_file(): print("Saved model exists...!!!") network = torch.load("recognizer.pt") if torch.cuda.is_available(): network.cuda() ######################################### ############TEST THE MODEL############### ######################################### for inputs_test, labels_test in db.generate_batches(batch_size=100, dset='test', rest=False): inputs_test = torch.from_numpy(inputs_test) labelValues = [labels.tolist().index(1) for labels in labels_test] labelValues = np.array(labelValues) labels_test = torch.from_numpy(labelValues) if torch.cuda.is_available(): inputs_test, labels_test = Variable(inputs_test.cuda()), Variable( labels_test.cuda()) else: inputs_test, labels_test = Variable(inputs_test), Variable( labels_test) outputs_test = network(inputs_test) _, predictions_test = torch.max(outputs_test.data, 1) # print('labelValues:{}'.format(labels_val.data)) # print('predictions:{}'.format(predictions))
# NOTE(review): whitespace-mangled fragment; near-duplicate of the chunk above
# (same Keras script, slightly different formatting — semicolon-joined counter
# resets) that continues further: it completes the validation loop
# (accumulating val_loss/val_accuracy over dset='val') before cutting off at a
# "# Verbose" comment. Duplicate chunks like this suggest the extraction
# overlapped — deduplicate against the original file rather than keeping both.
# NOTE(review): the `try:` has no visible `except`/`finally`; its handler lies
# outside this view. Do not reformat without the original line structure.
# NOTE(review): `SGD(lr=...)` and `accuracy=True` on
# train_on_batch/test_on_batch are legacy Keras (0.x/1.x) API — presumably an
# old Keras version is targeted; verify before porting.
model.add(Activation('softmax')) # Learning rule optimizer = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) model.compile(loss='categorical_crossentropy', optimizer=optimizer) # Training print('Start training...') nb_epochs = 10 batch_size = 100 try: for epoch in range(nb_epochs): print('Epoch', epoch+1, '/', nb_epochs) tstart = time() # Training batch_generator = db.generate_batches(batch_size, dset='train') nb_train_batches = 0; train_loss = 0.0; train_accuracy = 0.0 for X, y in batch_generator: loss, accuracy = model.train_on_batch(X, y, accuracy=True) train_loss += loss train_accuracy += accuracy nb_train_batches += 1 # Validation batch_generator = db.generate_batches(batch_size, dset='val') nb_val_batches = 0; val_loss = 0.0; val_accuracy = 0.0 for X, y in batch_generator: loss, accuracy = model.test_on_batch(X, y, accuracy=True) val_loss += loss val_accuracy += accuracy nb_val_batches += 1 # Verbose
# NOTE(review): whitespace-mangled fragment of a PyTorch training script —
# original line structure lost. Visible logic: build a CrossEntropyLoss
# criterion and an Adam optimizer over network.classifier.parameters() (only
# the classifier head is trained — the rest of `network` is presumably a
# frozen pretrained backbone; confirm), initialize loss/accuracy history
# lists, then loop 10 epochs over db.generate_batches(batch_size=100,
# dset='train', rest=False): convert one-hot label rows to integer class
# indices via `labels.tolist().index(1)` (CrossEntropyLoss expects class
# indices, not one-hot) and wrap inputs/labels as tensors with
# torch.from_numpy.
# NOTE(review): `batch_size = 100` is reassigned inside the loop redundantly,
# and `x = 1` / `numberOfSaves` / `running_loss` are initialized but not used
# in this view — their uses are in the truncated remainder of the chunk. The
# epoch loop body is cut off before the forward/backward pass; do not
# reformat or "fix" without recovering the original file.
criterion = nn.CrossEntropyLoss() optimizer = optim.Adam(network.classifier.parameters(), learning_rate) trainLoss = [] valLoss = [] trainAcc = [] valAcc = [] # Create a minibatch and train the network epochs = 10 running_loss = 0.0 x = 1 numberOfSaves = 0 for epoch in range(epochs): for inputs_train, labels_train in db.generate_batches(batch_size=100, dset='train', rest=False): batch_size = 100 ######################################### ###########TRAIN THE MODEL############### ######################################### # print(np.shape(inputs_train)) # print(np.shape(labels_train)) # Creating an array of class labels labelValues = [labels.tolist().index(1) for labels in labels_train] labelValues = np.array(labelValues) # print(labelValue) # print(np.shape(labels_train[:, x])) # print('Index: {}'.format(labels_train.index(1))) inputs_train = torch.from_numpy(inputs_train) labels_train = torch.from_numpy(labelValues)