import os
import glob

import cv2
import numpy as np

# loop over the images in each sub-folder
for file in images:
    # get the image file path
    file_path = os.path.join(current_dir, file)

    # read the image and resize it to a fixed size
    image = cv2.imread(file_path)
    if image is None:
        # skip files that OpenCV cannot decode
        continue
    image = cv2.resize(image, fixed_size)

    ####################################
    # Global Feature extraction
    ####################################
    fv_histogram  = fd_histogram(image)
    fv_4          = fd_4(image)
    fv_haralick   = fd_haralick(image)
    fv_hu_moments = fd_hu_moments(image)
    # new features
    fv_Fast = fd_Fast(image)
    fv_kaze = fd_Kaze(image)

    ###################################
    # Concatenate global features
    ###################################
    # global_feature = np.hstack([fv_histogram, fv_4, fv_haralick, fv_hu_moments])
    # global_feature = np.hstack([fv_histogram, fv_kaze])
    global_feature = np.hstack([fv_histogram, fv_Fast])
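# The fd_* helpers above are assumed to be defined earlier in the script.
# For reference, here is a minimal sketch of the two descriptors that feed
# the active concatenation, fd_histogram and fd_Fast. The bin count, the
# keypoint cap, and the coordinate-padding aggregation are illustrative
# assumptions, not the original implementations.

bins = 8  # assumed histogram resolution per channel

def fd_histogram(image, mask=None):
    # color histogram over HSV space, normalized and flattened to 1-D
    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    hist = cv2.calcHist([hsv], [0, 1, 2], mask,
                        [bins, bins, bins], [0, 180, 0, 256, 0, 256])
    cv2.normalize(hist, hist)
    return hist.flatten()

def fd_Fast(image, max_keypoints=64):
    # FAST is a keypoint *detector*, not a descriptor, so a fixed-length
    # vector has to be built from the keypoints; here the (x, y) positions
    # of the strongest max_keypoints responses, zero-padded -- an assumed
    # aggregation chosen only to yield a constant-size feature vector
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    fast = cv2.FastFeatureDetector_create()
    kps = sorted(fast.detect(gray, None), key=lambda k: -k.response)
    coords = [c for k in kps[:max_keypoints] for c in k.pt]
    coords += [0.0] * (2 * max_keypoints - len(coords))
    return np.array(coords, dtype=np.float32)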
# fit the training data to the model
clf.fit(trainDataGlobal, trainLabelsGlobal)

# loop through the test images
for idx, file in enumerate(glob.glob(test_path + "/*.jpg")):
    # read the image and resize it to the same fixed size used in training
    image = cv2.imread(file)
    if image is None:
        continue
    image = cv2.resize(image, fixed_size)

    ####################################
    # Global Feature extraction
    ####################################
    # the test features must use the same descriptors, in the same order,
    # as the training features above
    fv_histogram = fd_histogram(image)
    fv_Fast      = fd_Fast(image)

    ###################################
    # Concatenate global features
    ###################################
    global_feature = np.hstack([fv_histogram, fv_Fast])

    # scale features in the range (0-1)
    # scaler = MinMaxScaler(feature_range=(0, 1))
    # reshaped_features = np.reshape(global_feature, (1, len(global_feature)))
    # rescaled_feature = scaler.fit_transform(reshaped_features)
    rescaled_feature = global_feature

    # predict the label of the test image
    prediction = clf.predict(rescaled_feature.reshape(1, -1))[0]
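# Note on the commented-out scaling above: calling fit_transform on a single
# test vector fits the scaler to that one sample, so every feature collapses
# to the lower bound of the range (0). If features are min-max scaled for
# training, the same fitted scaler has to be reused at test time. A minimal
# sketch, assuming scaling is wanted; train_scaler is a name introduced here,
# not from the original script:
from sklearn.preprocessing import MinMaxScaler

# at training time: fit the scaler on the full training matrix
train_scaler = MinMaxScaler(feature_range=(0, 1))
trainDataGlobal_scaled = train_scaler.fit_transform(trainDataGlobal)
clf.fit(trainDataGlobal_scaled, trainLabelsGlobal)

# at test time: transform (never re-fit) each test feature vector
rescaled_feature = train_scaler.transform(global_feature.reshape(1, -1))
prediction = clf.predict(rescaled_feature)[0]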