Example no. 1
0
# Fully connected (backprop-trained) classifier head.
fc_args = (
    parameters['hidden_layers'],
    parameters['fc_neurons'],
    parameters['activation'],
    parameters['dropout_bp'],
)
model = ModelUtils.getFcModel(num_ftrs, NUM_CLASSES, *fc_args)

# Move the model onto the selected compute device.
model.to(device)
# summary(model,input_size=(TRAIN_BATCH_SIZE,num_ftrs))

# Predictive-coding counterpart built over the same layer layout.
pc_model_architecture = ModelUtils.getPcModelArchitecture(
    num_ftrs,
    NUM_CLASSES,
    parameters['hidden_layers'],
    parameters['fc_neurons'],
)

pc_model = PcTorch(pc_model_architecture)
pc_model.set_training_parameters(
    TRAIN_BATCH_SIZE,
    INFERENCE_STEPS,
    parameters['activation'],
    parameters['optimizer'],
    parameters['lr_pc'],
    parameters['momentum_pc'],
    normalize_input=True,
)

# Loss and optimizer for the backprop model.
criterion = nn.CrossEntropyLoss()  # standard multi-class classification loss
# NOTE(review): the sentinel here is the *string* 'None', not the None
# object — any downstream check must compare against the string; confirm
# this is intentional.
optimizer = 'None'

# NOTE(review): this optim.SGD(...) call is truncated in this chunk (the
# argument list never closes); the remaining arguments are outside this view.
if parameters['optimizer'] == 'sgd':
    optimizer = optim.SGD(model.parameters(),
                          lr=parameters['lr_bp'],
# Flatten every sample into a 1-D float vector before feeding the network.
# Fix: `np.float` was deprecated in NumPy 1.20 and removed in 1.24 (it was
# merely an alias of the builtin `float`); `astype(float)` produces the
# same float64 dtype without the AttributeError on modern NumPy.
for sample in x_train:
    x_train_list.append(sample.flatten().astype(float))

for sample in x_test:
    x_test_list.append(sample.flatten().astype(float))

# Normalize both dataset splits.
# NOTE(review): `mi`/`ma` returned for the train split are immediately
# overwritten by the test call, and the test split is scaled with its own
# statistics rather than the training ones — confirm this is intended.
normalized_train = util.normalize_dataset(x_train_list)
x_train_list, mi, ma = normalized_train
normalized_test = util.normalize_dataset(x_test_list)
x_test_list, mi, ma = normalized_test

# Capture the wall-clock timestamp just before training begins.
t_start = datetime.datetime.now()
print("Starting timer")

# Build the predictive-coding network and run the full training loop.
model_torch = PcTorch(NETWORK_ARCHITECTURE)
train_options = dict(
    batch_size=BATCH_SIZE,
    epochs=EPOCHS,
    max_it=INFERENCE_STEPS,
    optmizer=OPTIMIZER,  # keyword spelled 'optmizer' (sic) by PcTorch.train
    activation=ACTIVATION,
    dataset_perc=DATA_PERC,
    learning_rate=LR,
)
model_torch.train(x_train_list,
                  y_train_list,
                  x_test_list,
                  y_test_list,
                  **train_options)

# Capture the wall-clock timestamp after training; the difference is a
# datetime.timedelta holding the total training duration.
t_end = datetime.datetime.now()
elapsedTime = t_end - t_start