def predict(args): """Loads model from file, makes predictions and computes metrics. All created files are saved to args.out_dir directory if provided, or to results_<task> otherwise. Creates files: conf_matrix.png file with confusion matrix, report.txt with various metrics, preds_{task}.npy with raw predictions. """ if args.task.startswith('4'): test_features = np.load('test_features_3b.npy') test_labels = get_labels(split='test') y_true = np.array([CLASSES.index(l) for l in test_labels]) out_dir = args.out_dir or f'results_{args.task}' for c in [0.001, 0.01, 0.1, 1.0, 10]: svc = SVC() svc.load_from_file(f'svc_{args.task}_C_{c}') y_pred = svc.predict(test_features) evaluate(y_true, y_pred, None, CLASSES, os.path.join(out_dir, f'C_{c}')) else: model: Model = load_model( f'model_fc_{args.task}.h5', custom_objects={'top_5_accuracy': top_5_accuracy}) test_generator = create_data_generator(split='test', target_size=args.target_size, batch_size=args.batch_size, shuffle=False) # get predictions preds = model.predict_generator(test_generator, verbose=1) # create output directory out_dir = args.out_dir or f'results_{args.task}' os.makedirs(out_dir, exist_ok=True) # save numpy array with predictions save_file = os.path.join(out_dir, f'preds_{args.task}.npy') np.save(save_file, preds) print(f'Predictions saved to: {save_file}') # first, prepare y_pred, y_true and class names # y_pred are classes predicted with the highest probability y_pred = np.array([np.argmax(x) for x in preds]) # since we did not shuffle data in data generator, # classes attribute of the generator contains true labels for each sample y_true = np.array(test_generator.classes) # class_names = list(test_generator.class_indices.keys()) # class_names.sort(key=lambda x: test_generator.class_indices[x]) evaluate(y_true, y_pred, preds, CLASSES, out_dir)
def predict(y, z):
    """Loads a linear SVC from file and predicts the class of a single (y, z) sample."""
    clf = SVC(kernel="linear")
    clf.load_from_file("./model")
    return clf.predict([[y, z]])[0]
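
# Usage sketch for the single-sample variant above, assuming a trained model
# was previously saved to "./model"; the feature values are placeholders.
label = predict(0.5, 1.2)
print(f'Predicted label: {label}')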