def evaluate_classification(path, model_path='model.m',
                            output_path='classification_results.txt'):
    """Classify every sample of the 'albedo' dataset at *path* with a saved Alexnet.

    Loads the checkpoint at *model_path*, evaluates the whole dataset in a
    single batch, and appends one formatted line per sample (built by
    ``build_string``) to *output_path*.

    Parameters
    ----------
    path : str
        Root directory passed to ``build_datasets``.
    model_path : str, optional
        Checkpoint file holding a dict with a ``'model'`` state-dict entry
        (default ``'model.m'``, as before).
    output_path : str, optional
        Results file, opened in append mode (default
        ``'classification_results.txt'``, as before).

    Returns
    -------
    torch.Tensor
        Flat uint8 tensor of rounded per-sample predictions.
    """
    H = 180  # crop height applied to every image
    dataset_name = 'albedo'
    transform = tv.transforms.Compose([
        Crop(H),
        tv.transforms.ToTensor(),
        tv.transforms.Normalize(mean=[0.5], std=[1.0])
    ])
    dataset = build_datasets(path, dataset_name, transform)
    # batch_size=len(dataset): the whole dataset is evaluated in one batch.
    loader = DataLoader(dataset, batch_size=len(dataset), shuffle=False,
                        num_workers=4)

    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # Load on CPU first so a GPU-saved checkpoint also works on CPU-only hosts.
    loaded_dict = torch.load(model_path, map_location='cpu')

    print('Loading model...')
    model = Alexnet()
    model.load_state_dict(loaded_dict['model'])
    model = model.to(device)
    model.eval()

    with torch.no_grad():
        print('Loading data...')
        data = next(iter(loader))
        X = data['image'].float().to(device)
        # Replicate the single grayscale channel across the 3 input channels
        # the network expects (expand shares memory, no copy).
        X = X.expand(-1, 3, -1, -1)

        print('Evaluating model...')
        pred = model(X).round().byte().view(-1)

        print('Writing to file...')
        pred_str = [
            build_string(name, value)
            for name, value in zip(data['image_name'], pred.tolist())
        ]
        # Append mode: repeated runs accumulate results in the same file.
        with open(output_path, "a") as file:
            file.write(''.join(pred_str))

    return pred
# --- Validation-only evaluation cell (training loaders are commented out) ---
# NOTE(review): `H` is not defined in this cell — presumably set by an earlier
# cell (e.g. H = 180); confirm before running this cell in isolation.
path_training_ok = '/mnt/DATA/beantech_contestAI/Dataset2/campioni OK'
path_training_ko = '/mnt/DATA/beantech_contestAI/Dataset2/campioni KO'
path_validation_ok = '/mnt/DATA/beantech_contestAI/Dataset1/campioni OK'
path_validation_ko = '/mnt/DATA/beantech_contestAI/Dataset1/campioni KO'
num_epochs = 1
dataset_name = 'albedo'
# NOTE(review): name is misspelled ("tranform") but used consistently within
# this cell. Unlike the other cells, this pipeline applies no Normalize step —
# confirm whether that difference is intentional.
tranform = tv.transforms.Compose([Crop(H), tv.transforms.ToTensor()])
# (training_set_ok,), (training_set_ko,) = preproc.build_datasets(path_training_ok, path_training_ko, dataset_name, tranform, split=True)
# training_loader_ok = DataLoader(training_set_ok, batch_size=int(opt['batch_size']/2), shuffle=True)
# training_loader_ko = DataLoader(training_set_ko, batch_size=int(opt['batch_size']/2), shuffle=True)
# Trailing comma unpacks the single dataset returned when split=False.
validation_set, = preproc.build_datasets(path_validation_ok, path_validation_ko,
                                         dataset_name, tranform)
# Fixed seed keeps the 70/30 validation/test split reproducible.
# NOTE(review): random_state=55 here vs 56 in the other setup cell — confirm
# which split is the intended one.
val, test = preproc.split(validation_set, 0.7, random_state=55)
# batch_size equals the subset size: each loader yields everything in one batch.
val_loader = DataLoader(val, batch_size=len(val), shuffle=False)
test_loader = DataLoader(test, batch_size=len(test), shuffle=False)
# Materialize the entire validation split once, then free loader and subset.
val_data = next(iter(val_loader))
del val_loader, val
# test_data = next(iter(test_loader))
# del test_loader, test
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
#%%
# --- Training + validation setup, with all paths driven by the `opt` config ---
H = 180  # crop height in pixels applied to every sample
path_training_ok = join(opt['train_path'], 'campioni OK')
path_training_ko = join(opt['train_path'], 'campioni KO')
path_validation_ok = join(opt['validation_path'], 'campioni OK')
path_validation_ko = join(opt['validation_path'], 'campioni KO')
num_epochs = 1
dataset_name = 'albedo'
transform = tv.transforms.Compose([
    Crop(H),
    tv.transforms.ToTensor(),
    # mean=0.5, std=1.0: shifts pixel values from [0, 1] to [-0.5, 0.5].
    tv.transforms.Normalize(mean=[0.5], std=[1.0])
])
# split=True returns OK and KO as separate (single-element) dataset tuples so
# each class can be batched independently below.
(training_set_ok, ), (training_set_ko, ) = preproc.build_datasets(
    path_training_ok, path_training_ko, dataset_name, transform, split=True)
# Half the configured batch size per class → OK/KO batches combine to a full,
# class-balanced batch.
training_loader_ok = DataLoader(training_set_ok,
                                batch_size=int(opt['batch_size'] / 2),
                                shuffle=True)
training_loader_ko = DataLoader(training_set_ko,
                                batch_size=int(opt['batch_size'] / 2),
                                shuffle=True)
# Trailing comma unpacks the single dataset returned when split=False.
validation_set, = preproc.build_datasets(path_validation_ok, path_validation_ko,
                                         dataset_name, transform)
# Fixed seed keeps the 70/30 validation/test split reproducible.
# NOTE(review): random_state=56 here vs 55 in the other setup cell — confirm
# which split is the intended one.
val, test = preproc.split(validation_set, 0.7, random_state=56)
# batch_size equals the subset size: each loader yields everything in one batch.
val_loader = DataLoader(val, batch_size=len(val), shuffle=False)
test_loader = DataLoader(test, batch_size=len(test), shuffle=False)