def plot_interactive(f_2d, indexs, dataset_obj: Dataset, labels):
    """Show an interactive 2-D scatter of embedded points beside an image panel.

    Hovering over a scatter point fetches and displays the corresponding
    training image in the right-hand panel.

    Args:
        f_2d: array of shape (n, 2) with the 2-D coordinates of each point.
        indexs: sequence of dataset indices aligned with the rows of f_2d.
        dataset_obj: dataset used to fetch train images by index.
        labels: array-like with n entries used to color the scatter points.

    Side effects:
        Opens a matplotlib window (blocks in plt.show()) and prints the
        index of each hovered point.
    """
    n = f_2d.shape[0]
    # Map scatter-point position -> dataset index so the hover callback can
    # look up the right image.
    d_point_index = {i: indexs[i] for i in range(n)}
    img = imshow_util_uint8(
        dataset_obj.get_train_image_at(indexs[0])[0][0], dataset_obj)

    # Create figure: scatter on the left, image preview on the right.
    fig, axis = plt.subplots(1, 2)
    line = axis[0].scatter(f_2d[:, 0], f_2d[:, 1], c=labels.reshape(n), s=5)
    temp = axis[1].imshow(img)

    def hover(event):
        # BUGFIX: call contains() exactly once — the original called it twice,
        # which is redundant and can disagree between calls while the mouse
        # keeps moving.
        hit, details = line.contains(event)
        if hit:
            ind = details["ind"][0]
            r_ind = d_point_index[ind]
            print("Getting {0} index".format(r_ind))
            # Use a distinct name so we don't shadow `img` from the
            # enclosing scope.
            hover_img = imshow_util_uint8(
                dataset_obj.get_train_image_at(r_ind)[0][0], dataset_obj)
            temp.set_data(hover_img)
            fig.canvas.draw_idle()

    # add callback for mouse moves
    fig.canvas.mpl_connect('motion_notify_event', hover)
    plt.show()
def generate(self, dataset_object: Dataset, index_list, mask_file, select_path):
    """Write masked image variants plus an experiment summary to disk.

    For each index in ``index_list`` the stored mask is loaded from the
    pickle at ``mask_file``, combined with the train image via
    ``self.generate_img_mask``, and every resulting image is written as a
    PNG into a timestamped folder under ``gen_images``. A JSON file with
    the run details is written alongside the images.
    """
    dataset_name = str(dataset_object.__class__.__name__)
    generator_name = str(self.__class__.__name__)
    folder_name = '{0}_{1}_{2}'.format(dataset_name, generator_name, now_string())
    output_dir = os.path.join('gen_images', folder_name)
    os.makedirs(output_dir, exist_ok=True)

    # open masks pickle
    with open(mask_file, 'rb') as f:
        mask_dict = pickle.load(f)

    index_map = {}
    for ind in index_list:
        mask = mask_dict['masks'][ind]
        # returns (img,label) , img = [batch,w,h,c]
        img = dataset_object.get_train_image_at(ind)[0][0]
        generated = self.generate_img_mask(img, mask)

        # Persist the (boolean) mask as a black/white PNG next to the outputs.
        mask_path = os.path.join(output_dir, '{0}__mask.png'.format(ind))
        cv2.imwrite(mask_path, mask.astype(np.uint8) * 255)

        written_paths = []
        for ind_out, elem in enumerate(generated):
            out_path_img = os.path.join(
                output_dir, '{0}__{1}.png'.format(ind, ind_out))
            cv2.imwrite(out_path_img, elem)
            written_paths.append(out_path_img)
        index_map[ind] = written_paths

    exp_json = {
        'date': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        'dataset': dataset_name,
        'used_select': select_path,
        'index_map': index_map,
        'mask_file': str(mask_file),
        'generator': generator_name
    }
    details_path = os.path.join(output_dir, 'exp_details.json')
    print("Results in " + str(details_path))
    with open(details_path, 'w') as f:
        json.dump(exp_json, f)
def gabor_features_raw_data(kernels, dataset_obj: Dataset, img_shape,
                            limit=1400, allowed_labels=('1', '3')):
    """Build a Gabor-filter feature matrix from raw dataset images.

    Collects up to ``limit`` train images whose label is in
    ``allowed_labels``, normalizes them to [0, 1], convolves each with every
    kernel, downsamples each response to 8x8 and concatenates the flattened
    responses into one feature vector per image.

    Args:
        kernels: iterable of 2-D Gabor kernels.
        dataset_obj: dataset providing ``get_index_list`` and
            ``get_train_image_at``.
        img_shape: target shape each raw image is reshaped to.
        limit: maximum number of images to collect (was hard-coded to 1400).
        allowed_labels: labels to keep (was hard-coded to ('1', '3')).

    Returns:
        Tuple ``(dataset, indexs_names, r_labels, imgs_raw)`` where
        ``dataset`` is a (n_images, n_kernels * 64) feature matrix.
    """
    indexs = []
    labels = []
    imgs_raw = []
    cont = 0
    for indx in dataset_obj.get_index_list():
        img, label = dataset_obj.get_train_image_at(indx)
        if label not in allowed_labels:
            continue
        indexs.append(indx)
        img = img / 255  # normalize pixel values to [0, 1]
        labels.append(int(label))
        imgs_raw.append(img.reshape(img_shape))
        cont += 1
        # BUGFIX: original used `cont > limit`, which collected limit + 1
        # images before breaking.
        if cont >= limit:
            break

    # For every collected image, compute the activation of each kernel.
    dataset = []
    r_labels = []
    indexs_names = []
    for ind, cam_img in enumerate(imgs_raw):
        # Defensive bound; original tested `ind > len(labels)`, which could
        # never fire at the intended boundary (imgs_raw and labels are
        # always the same length here).
        if ind >= len(labels):
            break
        if ind != 0 and ind % 100 == 0:
            print(ind)  # progress indicator
        current_vector = []
        for kernel in kernels:
            filtered = ndi.convolve(cam_img, kernel, mode='wrap')
            # Downsample the filter response to 8x8.
            # NOTE(review): `imresize` (scipy.misc.imresize) was removed in
            # SciPy 1.3 — confirm which implementation is in scope here.
            res = imresize(filtered, (8, 8))
            current_vector.append(res.flatten())
        # Concatenate the per-kernel responses into one feature vector.
        current_vector = np.array(current_vector).flatten()
        dataset.append(current_vector)
        r_labels.append(labels[ind])
        # Dataset indices may be stored as bytes; normalize to str.
        ind_to_add = (indexs[ind].decode('utf8')
                      if isinstance(indexs[ind], bytes) else indexs[ind])
        indexs_names.append(ind_to_add)
    dataset = np.vstack(dataset)
    return dataset, indexs_names, r_labels, imgs_raw