def sub_16_map(self, data_frame, dataset_images_features):
    """Compute SIFT cluster feature vectors for all 16 metadata combinations.

    The 16 combinations are the Cartesian product of
    aspect-of-hand (dorsal/palmar x left/right), accessories flag (1/0)
    and gender (male/female).

    :param data_frame: metadata DataFrame passed through to
        get_metadata_sift_feature_vector.
    :param dataset_images_features: mapping of image id -> SIFT features.
    :return: dict keyed 'combination1'..'combination16', post-processed by
        FeaturesImages.compute_sift_new_features.
    """
    # Build the 16 [aspect, accessories, gender] triples in the same order
    # as the original hand-written list (gender outermost, flag innermost).
    metadata_arr_list = [
        [aspect, accessories, gender]
        for gender in ('male', 'female')
        for aspect in ('dorsal left', 'dorsal right',
                       'palmar left', 'palmar right')
        for accessories in (1, 0)
    ]
    features_image = FeaturesImages('SIFT')
    metadata_vectors_16_map = {}
    for count, metadata_arr in enumerate(metadata_arr_list, start=1):
        sift_cluster_vector = self.get_metadata_sift_feature_vector(
            data_frame, metadata_arr, dataset_images_features)
        metadata_vectors_16_map['combination' + str(count)] = sift_cluster_vector
    # Post-process the 16 cluster vectors into the final feature map.
    metadata_vectors_16_map = features_image.compute_sift_new_features(
        metadata_vectors_16_map)
    return metadata_vectors_16_map
def get_image_dataset_features(self):
    """Compute and cache the feature map for every image in the test folder.

    Side effects: populates self.image_feature_map, self.images_list, and
    deep-copied snapshots self.original_feature_map / self.original_image_list
    so later mutations do not lose the originals.
    """
    # Run feature extraction over the whole folder (results go to a pickle).
    extractor = FeaturesImages(self.feature_name, self.test_folder_path)
    extractor.compute_features_images_folder()
    # Reload the computed features and keep pristine copies.
    feature_map = misc.load_from_pickle(self.pickle_file_folder, self.feature_name)
    self.image_feature_map = feature_map
    self.images_list = list(feature_map.keys())
    self.original_feature_map = copy.deepcopy(feature_map)
    self.original_image_list = copy.deepcopy(self.images_list)
def get_database_image_features(self, test_folder=None, decomposition=None,
                                reduced_dimension=False, metadata_pickle=None):
    """Return (test_image_features, dataset_images_features).

    :param test_folder: folder (relative to the project parent) holding images.
    :param decomposition: Decomposition object; used only when
        reduced_dimension is True and no metadata_pickle is given.
    :param reduced_dimension: if False, use full-dimensional model features;
        if True, use features reduced by `decomposition` (or a metadata pickle).
    :param metadata_pickle: optional pickle name under Phase2/pickle_files to
        load pre-reduced metadata features from.
    """
    feature = self.model_name
    if not reduced_dimension:
        # Full-dimensional path: (re)compute features for the whole folder,
        # then compute the test image's features directly.
        test_folder_path = os.path.join(
            Path(os.path.dirname(__file__)).parent, test_folder)
        test_image_path = os.path.join(test_folder_path, self.test_image_id)
        features_images = FeaturesImages(self.model_name, test_folder_path)
        features_images.compute_features_images_folder()
        test_image_features = features_images.compute_image_features(
            test_image_path)
        dataset_images_features = misc.load_from_pickle(
            os.path.dirname(__file__), feature)
        return test_image_features, dataset_images_features

    reduced_dimension_pickle_path = os.path.join(
        Path(os.path.dirname(__file__)).parent, 'Phase2', 'pickle_files')
    if metadata_pickle:
        # Pre-reduced metadata features already pickled; look the test image up.
        dataset_images_features = misc.load_from_pickle(
            reduced_dimension_pickle_path, metadata_pickle)
        test_image_features = dataset_images_features[self.test_image_id]
        return test_image_features, dataset_images_features

    # Reduced-dimension path: run the reduction first if its pickle is missing.
    pickle_name = feature + '_' + decomposition.decomposition_name
    if not os.path.exists(
            os.path.join(reduced_dimension_pickle_path, pickle_name + '.pkl')):
        print('Pickle file not found for the Particular (model,Reduction)')
        print('Running Task1 for the Particular (model,Reduction) to get the pickle file')
        decomposition.dimensionality_reduction()
    dataset_images_features = misc.load_from_pickle(
        reduced_dimension_pickle_path, pickle_name, self.k)
    test_image_features = dataset_images_features[self.test_image_id]
    return test_image_features, dataset_images_features
def get_unlabelled_classification_image_features(self, image_id, unlabelled_folder_path):
    """Compute the feature vector for one unlabelled image.

    :param image_id: file name of the image inside the unlabelled folder.
    :param unlabelled_folder_path: folder path relative to the project parent.
    :return: single-element list containing the image's feature vector.
    """
    parent_dir = Path(os.path.dirname(__file__)).parent
    folder = os.path.join(parent_dir, unlabelled_folder_path)
    extractor = FeaturesImages(self.feature_name, folder)
    features = extractor.compute_image_features(os.path.join(parent_dir, unlabelled_folder_path, image_id))
    return [features]
def get_similarity_value(self, images_list, dataset_images_features):
    """Return the summed similarity between the test image and a set of images.

    :param images_list: image ids to compare against.
    :param dataset_images_features: mapping of image id -> feature vector.
    :return: sum of model.similarity_fn over all listed images (0 for empty).
    """
    model = FeaturesImages(self.model_name).get_model()
    test_features = dataset_images_features[self.test_image_id]
    # sum() over a generator replicates the original accumulate-in-a-loop.
    return sum(
        model.similarity_fn(test_features, dataset_images_features[image_id])
        for image_id in images_list
    )
def __init__(self, decomposition_name, k_components, feature_extraction_model_name,
             test_folder_path, metadata_images_list=None, metadata_label=None):
    """Set up the decomposition over features extracted from a folder.

    :param decomposition_name: name of the reduction technique (e.g. PCA/SVD).
    :param k_components: number of latent components to keep.
    :param feature_extraction_model_name: feature model name (e.g. CM, HOG).
    :param test_folder_path: folder whose images supply the data matrix.
    :param metadata_images_list: optional subset of image ids to use.
    :param metadata_label: optional label appended to saved pickle names.
    """
    # Plain configuration.
    self.decomposition_name = decomposition_name
    self.k_components = k_components
    self.feature_extraction_model_name = feature_extraction_model_name
    self.test_folder_path = test_folder_path
    self.metadata_images_list = metadata_images_list
    self.metadata_label = metadata_label or ''
    # Feature-extraction backend for the chosen model.
    self.feature_extraction_object = FeaturesImages(
        self.feature_extraction_model_name, self.test_folder_path)
    self.feature_extraction_model = self.feature_extraction_object.get_model()
    # Filled later: the model by dimensionality_reduction(), the matrix and
    # image-id list by set_database_matrix() below.
    self.decomposition_model = None
    self.database_matrix = []
    self.database_image_id = []
    self.reduced_pickle_file_folder = os.path.join(
        os.path.dirname(__file__), 'pickle_files')
    self.set_database_matrix()
def get_similar_images(self, test_folder=None, decomposition=None,
                       reduced_dimension=False, metadata_pickle=None):
    """Rank the dataset against the test image and plot the k most similar.

    :param test_folder: folder (relative to the project parent) with images.
    :param decomposition: optional Decomposition forwarded to feature loading.
    :param reduced_dimension: whether to use reduced-dimension features.
    :param metadata_pickle: optional metadata-features pickle name.
    """
    test_folder_path = os.path.join(
        Path(os.path.dirname(__file__)).parent, test_folder)
    test_image_path = os.path.join(test_folder_path, self.test_image_id)
    try:
        # Verify the test image exists before doing any heavy work.
        misc.read_image(test_image_path)
    except FileNotFoundError:
        print('ImageId is not in the folder specified.')
        return
    test_image_features, dataset_images_features = self.get_database_image_features(
        test_folder, decomposition, reduced_dimension, metadata_pickle)
    model = FeaturesImages(self.model_name).get_model()
    # Similarity/distance of every dataset image against the test image.
    # The test image itself is excluded here, so `ranking` never contains it.
    ranking = {}
    for image_id, feature_vector in tqdm(dataset_images_features.items()):
        if image_id != self.test_image_id:
            ranking[image_id] = model.similarity_fn(test_image_features,
                                                    feature_vector)
    sorted_results = collections.OrderedDict(
        sorted(ranking.items(), key=lambda val: val[1],
               reverse=model.reverse_sort))
    # Since the test image was already excluded, take exactly k entries.
    # (The previous code sliced k+1 and then filtered a test image that could
    # never appear, so it always plotted one extra image.)
    plot_images = {
        os.path.join(test_folder_path, image_id): sorted_results[image_id]
        for image_id in list(sorted_results)[:self.k]
    }
    print('Plotting Similar Images')
    misc.plot_similar_images(plot_images)
def get_unlabelled_images_decomposed_features(self):
    """Compute (and optionally decompose) features for every unlabelled image.

    :return: dict of image id -> feature vector, projected into the latent
        space when a decomposition is configured, otherwise the raw features
        wrapped in a single-element list.
    """
    test_dataset_folder_path = os.path.abspath(
        os.path.join(Path(os.getcwd()).parent, self.unlabelled_dataset_path))
    images_list = list(
        misc.get_images_in_directory(test_dataset_folder_path).keys())
    # Hoisted out of the loop: the extractor is identical for every image,
    # so there is no need to rebuild it per iteration.
    features_images = FeaturesImages(self.feature_name, test_dataset_folder_path)
    images_decomposed_features = {}
    for image_id in images_list:
        test_image_path = os.path.join(test_dataset_folder_path, image_id)
        test_image_features = [
            features_images.compute_image_features(test_image_path)
        ]
        if self.decomposition_name != '':
            # Project the raw features into the fitted latent space.
            images_decomposed_features[image_id] = (
                self.decomposition.decomposition_model
                .get_new_image_features_in_latent_space(test_image_features))
        else:
            images_decomposed_features[image_id] = test_image_features
    return images_decomposed_features
def set_features(self):
    """Prepare labelled/unlabelled features for dorsal-vs-palmar classification.

    If a decomposition is configured, fit it on the labelled dataset with 100
    components; otherwise compute raw Phase1 features. Then compute unlabelled
    features, pickle them, and load dorsal/palmar feature sets.
    """
    if self.decomposition_name != '':
        # Fit the reduction on the labelled dataset (100 latent components).
        self.decomposition = Decomposition(self.decomposition_name, 100,
                                           self.feature_name,
                                           self.labelled_dataset_path)
        self.decomposition.dimensionality_reduction()
    else:
        labelled_folder = os.path.abspath(
            os.path.join(Path(os.getcwd()).parent, self.labelled_dataset_path))
        print('Getting the Model Features from Phase1')
        extractor = FeaturesImages(self.feature_name, labelled_folder)
        extractor.compute_features_images_folder()
    # Cache unlabelled-image features for later classification runs.
    self.unlabelled_dataset_features = self.get_unlabelled_images_decomposed_features()
    misc.save2pickle(self.unlabelled_dataset_features,
                     self.reduced_pickle_file_folder,
                     feature='unlabelled_' + self.decomposed_feature)
    print("Getting features for dorsal_images ")
    self.dorsal_features = self.get_features('dorsal')
    print("Getting features for palmar images")
    self.palmar_features = self.get_features('palmar')
import sys

from features_images import FeaturesImages
from similar_images import Similarity


def main():
    """Interactive CLI dispatcher for the Phase-1 tasks.

    Task 1: features of one image; Task 2: features of a folder;
    Task 3: k most similar images to a test image.
    """
    task = input("Please specify the task number: ")
    model = input("1.CM\n2.LBP\n3.HOG\n4.SIFT\nSelect model: ")
    if task == '1':
        image_id = input("Please specify the test image file name: ")
        features_image = FeaturesImages(model)
        features_image.compute_image_features(image_id, print_arr=True)
    elif task == '2':
        folder_path = input("Please specify test folder path: ")
        features_folder = FeaturesImages(model, folder_path)
        features_folder.compute_features_images_folder()
    elif task == '3':
        image_id = input("Please specify the test image file name: ")
        k = int(input("Please specify the value of K: "))
        test_dataset_path = input("Please specify test folder path: ")
        similarity = Similarity(model, image_id, k)
        similarity.get_similar_images(test_dataset_path)
    else:
        # Previously an unrecognised task number silently did nothing.
        print('Unknown task number: ' + task)
        sys.exit(1)


# Guard so importing this module does not trigger the interactive prompts.
if __name__ == '__main__':
    main()
def get_main_features(feature_name, dataset_folder_path):
    """Compute features for a dataset folder and return the Phase1 feature map.

    :param feature_name: feature model name (e.g. CM, LBP, HOG, SIFT).
    :param dataset_folder_path: folder containing the dataset images.
    :return: mapping of image id -> feature vector loaded from the Phase1 pickle.
    """
    phase1_folder = os.path.join(Path(os.path.dirname(__file__)).parent, 'Phase1')
    extractor = FeaturesImages(feature_name, dataset_folder_path)
    extractor.compute_features_images_folder()
    return misc.load_from_pickle(phase1_folder, feature_name)
class Decomposition:
    """Dimensionality reduction (PCA/SVD/NMF/LDA) over a folder's image features."""

    def __init__(self, decomposition_name, k_components, feature_extraction_model_name,
                 test_folder_path, metadata_images_list=None, metadata_label=None):
        """Configure the decomposition and load the data matrix.

        :param decomposition_name: one of 'PCA', 'SVD', 'NMF', 'LDA'.
        :param k_components: number of latent components to keep.
        :param feature_extraction_model_name: feature model name (e.g. CM, HOG).
        :param test_folder_path: folder whose images supply the data matrix.
        :param metadata_images_list: optional subset of image ids to use.
        :param metadata_label: optional label appended to saved pickle names.
        """
        self.decomposition_name = decomposition_name
        self.k_components = k_components
        # Set by dimensionality_reduction().
        self.decomposition_model = None
        self.feature_extraction_model_name = feature_extraction_model_name
        self.test_folder_path = test_folder_path
        self.feature_extraction_object = FeaturesImages(
            self.feature_extraction_model_name, self.test_folder_path)
        self.feature_extraction_model = self.feature_extraction_object.get_model()
        self.database_matrix = []
        self.database_image_id = []
        self.reduced_pickle_file_folder = os.path.join(
            os.path.dirname(__file__), 'pickle_files')
        self.metadata_images_list = metadata_images_list
        self.metadata_label = metadata_label or ''
        self.set_database_matrix()

    def set_database_matrix(self):
        """Load Phase1 features and fill database_matrix / database_image_id.

        When metadata_images_list is given, only those images are used;
        otherwise every image in the Phase1 feature pickle is included.
        """
        parent_directory_path = Path(os.path.dirname(__file__)).parent
        pickle_file_directory = os.path.join(parent_directory_path, 'Phase1')
        print('Getting the Model Features from Phase1')
        self.feature_extraction_object.compute_features_images_folder()
        database_images_features = misc.load_from_pickle(
            pickle_file_directory, self.feature_extraction_model_name)
        if self.metadata_images_list is not None:
            print("Taking images based on metadata")
            for image_id in self.metadata_images_list:
                self.database_matrix.append(database_images_features[image_id])
                self.database_image_id.append(image_id)
        else:
            for image_id, feature_vector in database_images_features.items():
                self.database_matrix.append(feature_vector)
                self.database_image_id.append(image_id)

    def dimensionality_reduction(self):
        """Fit the configured reduction model and pickle the reduced features.

        :raises ValueError: if decomposition_name is not a known technique.
        """
        # NOTE: with <=20 images or <=20 features the reduction models can
        # error because n_components must be <= min(n_samples, n_features);
        # the original code noted taking
        # min(min(len(matrix[0]), len(matrix)), 20) but never implemented it.
        model_classes = {
            'PCA': PCAModel,
            'SVD': SVD,
            'NMF': NMFModel,
            'LDA': LDAModel,
        }
        try:
            model_class = model_classes[self.decomposition_name]
        except KeyError:
            # Previously an unknown name left decomposition_model as None and
            # crashed later with AttributeError; fail fast with a clear error.
            raise ValueError(
                'Unknown decomposition: ' + str(self.decomposition_name))
        self.decomposition_model = model_class(
            self.database_matrix, self.k_components, self.database_image_id)
        self.decomposition_model.decompose()
        print('Decomposition Complete')
        decomposed_database_matrix = \
            self.decomposition_model.get_decomposed_data_matrix()
        reduced_dimension_folder_images_dict = dict(
            zip(self.database_image_id, decomposed_database_matrix))
        # Single save path; the metadata label is an optional suffix
        # (the original duplicated the save2pickle call in both branches).
        feature_name = (self.feature_extraction_model_name + '_' +
                        self.decomposition_name)
        if self.metadata_label != '':
            feature_name += '_' + self.metadata_label
        misc.save2pickle(reduced_dimension_folder_images_dict,
                         self.reduced_pickle_file_folder,
                         feature=feature_name)