def learn_on_pos_neg_files(self, file_train_pos, file_train_neg, delimeter='\t', rand_pos=None, rand_neg=None):
    """Train self.classifier on a positive and a negative feature file.

    Each file is a delimited table whose first column is dropped (name/label)
    and whose remaining columns are numeric features. Positives get label +1,
    negatives -1.

    :param rand_pos: optional index array selecting a subset of positive rows
    :param rand_neg: optional index array selecting a subset of negative rows
    """
    # fix: `numpy.str` was removed in numpy>=1.24; the builtin str is the drop-in replacement
    X_train_pos = tools_IO.load_mat(file_train_pos, numpy.chararray, delimeter).astype(str)
    X_train_neg = tools_IO.load_mat(file_train_neg, numpy.chararray, delimeter).astype(str)
    # drop the first column, keep numeric features
    X_train_pos = X_train_pos[:, 1:].astype('float32')
    X_train_neg = X_train_neg[:, 1:].astype('float32')
    # fix: defaults were shared mutable lists ([]); None keeps the "no subsampling" meaning
    if rand_pos is not None and len(rand_pos) > 0:
        X_train_pos = X_train_pos[rand_pos]
    if rand_neg is not None and len(rand_neg) > 0:
        X_train_neg = X_train_neg[rand_neg]
    X_train = numpy.vstack((X_train_pos, X_train_neg))
    Y_train = numpy.hstack((numpy.full(X_train_pos.shape[0], +1), numpy.full(X_train_neg.shape[0], -1)))
    self.classifier.learn(X_train, Y_train)
    return
def plot_two_series(filename1, filename2, caption=''):
    # Plot two series files against each other with their SMAPE in the legend.
    # 1-D files: one overlay plot; 2-D files: one subplot per column (max 5).
    mat1 = tools_IO.load_mat(filename1, dtype=numpy.float32, delim='\t')
    mat2 = tools_IO.load_mat(filename2, dtype=numpy.float32, delim='\t')
    if len(mat1.shape)==1:
        # single series per file
        SMAPE = calc_series_SMAPE(numpy.array([mat1, mat2]).T)
        labels = [filename1.split('/')[-1], filename2.split('/')[-1]]
        fig = plt.figure(figsize=(12, 6))
        fig.subplots_adjust(hspace=0.01)
        # NOTE(review): fig.canvas.set_window_title was removed in matplotlib>=3.6
        # (use fig.canvas.manager.set_window_title) — confirm pinned version
        fig.canvas.set_window_title(caption)
        plot_series(plt.subplot(1, 1, 1), numpy.vstack((mat1,mat2)).T, labels=labels,SMAPE=SMAPE)
        plt.tight_layout()
    else:
        fig = plt.figure(figsize=(12, 6))
        fig.subplots_adjust(hspace=0.01)
        fig.canvas.set_window_title(caption)
        labels = [filename1.split('/')[-1], filename2.split('/')[-1]]
        # cap the number of subplots at 5 columns
        S = numpy.minimum(mat1.shape[1],5)
        for i in range(0,S):
            SMAPE = calc_series_SMAPE(numpy.array([mat1[:,i], mat2[:,i]]).T)
            plot_series(plt.subplot(S, 1, i+1), numpy.vstack((mat1[:,i], mat2[:,i])).T, labels=labels, SMAPE=SMAPE)
        plt.tight_layout()
    return
def generate_data_grid(self, filename_data_train, filename_data_test, filename_data_grid):
    """Build a step x step grid spanning the 2-D feature range of train+test data
    and save it (with zero targets) via save_data_to_feature_file_float.

    Does nothing when the feature dimensionality is not exactly 2.
    """
    X_train = tools_IO.load_mat(filename_data_train, numpy.chararray, '\t')[:, 1:].astype('float32')
    X_test = tools_IO.load_mat(filename_data_test, numpy.chararray, '\t')[:, 1:].astype('float32')
    X = numpy.vstack((X_train, X_test))
    if X.shape[1] != 2:
        return
    minx, maxx = numpy.min(X[:, 0]), numpy.max(X[:, 0])
    miny, maxy = numpy.min(X[:, 1]), numpy.max(X[:, 1])
    step = 10
    data_grid = numpy.zeros((step * step, 2))
    target_grid = numpy.zeros(step * step)
    for s1 in range(step):
        for s2 in range(step):
            # rows are laid out so that larger s2 (y) ends up earlier: image-style ordering
            row = s1 + (step - 1 - s2) * step
            data_grid[row, 0] = minx + s1 * (maxx - minx) / (step - 1)
            data_grid[row, 1] = miny + s2 * (maxy - miny) / (step - 1)
    tools_IO.save_data_to_feature_file_float(filename_data_grid, data_grid, target_grid)
    return
def calc_SMAPE_for_files(self, filename1, filename2):
    # Mean column-wise SMAPE between two tab-separated numeric files.
    mat1 = tools_IO.load_mat(filename1, dtype=numpy.float32, delim='\t')
    mat2 = tools_IO.load_mat(filename2, dtype=numpy.float32, delim='\t')
    if len(mat1.shape) == 1:
        # NOTE(review): this promotes a 1-D series to shape (1, N), so the loop
        # below averages SMAPE over N single-element "columns"; a column vector
        # (numpy.array([mat1]).T) may have been intended — confirm with callers.
        mat1 = numpy.array([mat1])
        mat2 = numpy.array([mat2])
    q = 0
    for v in range(0, mat1.shape[1]):
        q += tools_IO.smape(mat1[:, v], mat2[:, v])
    # average SMAPE over columns
    return q / mat1.shape[1]
def draw_GT_pred(self, filename_scrs, filename_out, th, has_header=True):
    """Render a per-sample timeline strip comparing ground truth vs predictions.

    Each column of the output image is one sample: ground truth is drawn as a
    green band (lower band = negative, upper band = positive) and the
    thresholded prediction as an orange band just outside each GT band.

    :param filename_scrs: table with label in col 0 and score(s) in col 1+
    :param th: decision threshold applied to the scores
    """
    X = tools_IO.load_mat(filename_scrs, numpy.chararray, '\t')
    if has_header:
        X = X[1:, :]
    else:
        X = X[:, :]
    labels = (X[:, 0]).astype('float32')
    scores = X[:, 1:].astype('float32')
    H = 100
    color_GT = (64, 128, 0)
    color_pred = (190, 128, 0)
    # fix: numpy.full defaults to int64, which cv2.imwrite rejects — force uint8
    image = numpy.full((H, X.shape[0], 3), 0xC0, dtype=numpy.uint8)
    for c, label in enumerate(labels):
        if label <= 0:
            image[70:75, c] = color_GT
        else:
            image[20:25, c] = color_GT
    for c, score in enumerate(scores):
        if score <= th:
            image[76:81, c] = color_pred
        else:
            image[14:19, c] = color_pred
    #image = cv2.flip(image,0)
    cv2.imwrite(filename_out, image)
    return
def score_feature_file(self, file_test, filename_scrs=None, delimeter='\t', append=0, rand_sel=None, has_header=True, has_labels_first_col=False):
    """Score a feature file with self.classifier and optionally save label/score pairs.

    :param rand_sel: optional index array selecting a subset of rows
    :param append: passed through to save_labels (append vs overwrite)
    """
    if not os.path.isfile(file_test):
        return
    data_test = tools_IO.load_mat(file_test, numpy.chararray, delimeter)
    header, Y_test, X_test = self.preprocess_header(data_test, has_header=has_header, has_labels_first_col=has_labels_first_col)
    # fix: numpy.float / numpy.int were removed in numpy>=1.24 — use the builtins;
    # go through float first so string labels like '1.0' survive the int conversion
    Y_test = numpy.array(Y_test, dtype=float)
    Y_test = numpy.array(Y_test, dtype=int)
    if rand_sel is not None:
        X_test = X_test[rand_sel]
        Y_test = Y_test[rand_sel]
    score = self.classifier.predict(X_test)
    # probability of the positive class, scaled to integer percent
    score = (100 * score[:, 1]).astype(int)
    if (filename_scrs != None):
        tools_IO.save_labels(filename_scrs, Y_test, score, append, delim=delimeter)
    return
def score_feature_file(self, file_test, filename_scrs=None, delimeter='\t', append=0, rand_sel=None):
    """Score a feature file (label col 0, features col 1+) and optionally save results.

    :param rand_sel: optional index array selecting a subset of rows
    """
    data_test = tools_IO.load_mat(file_test, numpy.chararray, delimeter)
    data_test = data_test[:, :]
    # fix: numpy.str was removed in numpy>=1.24 — builtin str is the replacement
    labels_test = (data_test[:, 0]).astype(str)
    data_test = data_test[:, 1:]
    # drop a trailing empty column produced by a trailing delimiter
    if data_test[0, -1] == b'':
        data_test = data_test[:, :-1]
    data_test = data_test.astype('float32')
    # fix: default was a shared mutable []; None keeps the "use all rows" meaning
    if rand_sel is not None and len(rand_sel) > 0:
        data_test = data_test[rand_sel]
        labels_test = labels_test[rand_sel]
    score = self.classifier.predict(data_test)
    # probability of the positive class, scaled to integer percent
    score = (100 * score[:, 1]).astype(int)
    if (filename_scrs != None):
        tools_IO.save_labels(filename_scrs, labels_test, score, append, delim=delimeter)
    return
def filter_landmarks(self, filename_in, filename_out, N=5, delim='\t'):
    """Median-filter landmark trajectories and write the result to a new file.

    Rows alternate x/y coordinates (even rows = x, odd rows = y); column 0
    holds row names, the remaining columns are per-frame values.

    :param N: median filter window size
    """
    # fix: numpy.str / numpy.float were removed in numpy>=1.24 — use the builtins
    dataset = tools_IO.load_mat(filename_in, delim=delim, dtype=str)
    result = []
    idx_x = numpy.arange(0, dataset.shape[0], 2)
    idx_y = numpy.arange(1, dataset.shape[0], 2)
    for c in range(1, dataset.shape[1]):
        D = numpy.array(dataset[:, c], dtype=float)
        if True:  # c in [0,1,2,3,14,15,16,17]:
            X = D[idx_x]
            Y = D[idx_y]
            Rx = tools_filter.do_filter_median(X, N)
            Ry = tools_filter.do_filter_median(Y, N)
            D[0::2] = Rx
            D[1::2] = Ry
        result.append(D)
    result = (numpy.array(result).T).astype(str)
    names = dataset[:, 0]
    result = numpy.insert(result, 0, names, axis=1)
    # fix: context manager guarantees the file is closed even if savetxt raises;
    # encoding is ignored by savetxt for open streams, kept as before
    with open(filename_out, "w") as myfile:
        numpy.savetxt(myfile, result, fmt='%s', encoding='str', delimiter=delim)
    return
def load_nodes(self, filename_in, H, W):
    """Parse a flat node file into [idx, x, flipped_y, w, link_ids] records.

    File layout per node: n_links, x, y, w, then n_links (offset, ?) pairs,
    where each offset points at the flat position of a linked node.
    Two passes: first map flat offsets -> node indices (I), then resolve links.
    Sets self.nodes and returns the node list.
    """
    self.W, self.H = W, H
    # fix: numpy.int was removed in numpy>=1.24 — the builtin int is equivalent here
    X = tools_IO.load_mat(filename_in, delim=' ', dtype=int)
    I = 0 * X.copy()
    # pass 1: only build the offset -> node-index map (the node list built here
    # previously was discarded and immediately rebuilt — removed as dead work)
    idx, i = 0, 0
    while i < len(X):
        n_links = X[i]
        I[i:i + 4 + n_links * 2] = idx
        i += 4 + n_links * 2
        idx += 1
    # pass 2: build nodes, resolving link offsets through I
    result, idx, i = [], 0, 0
    while i < len(X):
        n_links, x, y, w = X[i], X[i + 1], X[i + 2], X[i + 3]
        link_ids, c = [], 0
        while (c < n_links):
            offset = X[i + 4 + c * 2]
            link_ids.append(I[offset])
            c += 1
        # y axis is flipped into screen coordinates
        result.append([idx, x, 2 * self.H - y, w, link_ids])
        i += 4 + n_links * 2
        idx += 1
    self.nodes = result
    return result
def interpolate(self, filename_in, filename_out, N=5, delim='\t'):
    """Fill zero gaps in landmark trajectories and write the result to a new file.

    Rows alternate x/y coordinates (even rows = x, odd rows = y); column 0
    holds row names, the remaining columns are per-frame values.

    :param N: kept for interface compatibility; not used by fill_zeros
    """
    # fix: numpy.str / numpy.float were removed in numpy>=1.24 — use the builtins
    dataset = tools_IO.load_mat(filename_in, delim=delim, dtype=str)
    result = []
    idx_x = numpy.arange(0, dataset.shape[0], 2)
    idx_y = numpy.arange(1, dataset.shape[0], 2)
    for c in range(1, dataset.shape[1]):
        D = numpy.array(dataset[:, c], dtype=float)
        X = D[idx_x]
        Y = D[idx_y]
        Rx = tools_filter.fill_zeros(X)
        Ry = tools_filter.fill_zeros(Y)
        D[0::2] = Rx
        D[1::2] = Ry
        result.append(D)
    result = (numpy.array(result).T).astype(str)
    names = dataset[:, 0]
    result = numpy.insert(result, 0, names, axis=1)
    # fix: context manager guarantees the file is closed even if savetxt raises;
    # encoding is ignored by savetxt for open streams, kept as before
    with open(filename_out, "w") as myfile:
        numpy.savetxt(myfile, result, fmt='%s', encoding='str', delimiter=delim)
    return
def plot_confusion_mat(plt,fig,filename_mat,caption=''):
    # Draw a confusion-matrix heat map from a two-column (fact, predicted) file;
    # the title shows overall accuracy = trace / total.
    confusion_mat = tools_IO.load_mat(filename_mat,dtype=numpy.chararray,delim='\t')[:,:2]
    patterns = numpy.unique(confusion_mat[:, 0])
    # map class names to integer ids via their position in `patterns`
    labels_fact = numpy.array([tools_IO.smart_index(patterns, each) for each in confusion_mat[:, 0]])
    labels_pred = numpy.array([tools_IO.smart_index(patterns, each) for each in confusion_mat[:, 1]])
    mat, descriptions,sorted_labels = tools_IO.preditions_to_mat(labels_fact, labels_pred, numpy.unique(confusion_mat[:,0]))
    ind = numpy.array([('%3d' % i) for i in range(0, mat.shape[0])])  # unused
    TP = float(numpy.trace(mat))
    plt.imshow(mat,cmap='jet')
    ax = fig.gca()
    ax.set_xticks(numpy.arange(mat.shape[1]))
    ax.set_yticks(numpy.arange(mat.shape[0]))
    # NOTE(review): only the y axis gets class-name labels (x stays numeric) —
    # confirm intended; also numpy.str was removed in numpy>=1.24, this line
    # raises AttributeError on modern numpy.
    ax.set_yticklabels(sorted_labels.astype(numpy.str))
    for i in range(mat.shape[1]):
        for j in range(mat.shape[0]):
            # text(x=j, y=i) annotates cell at row i, column j
            ax.text(j, i, '%1.0f'% mat[i,j],ha="center", va="center", color='white')
    TP = numpy.trace(mat)  # recomputed (shadows the float above)
    ax.set_title(caption + " %1.2f" % (float(TP/numpy.sum(mat))))
    return
def load_and_normalize(self, filename_input):
    """Load a tab-separated float matrix and min-max scale every column to [0, 1].

    Stores the fitted scaler on self.scaler (for later inverse transforms) and
    the data shape on self.scaler_dim; returns the normalized matrix.
    """
    dataset = tools_IO.load_mat(filename_input, delim='\t', dtype=numpy.float32)
    self.scaler = MinMaxScaler(feature_range=(0, 1))
    normalized = self.scaler.fit_transform(dataset)
    self.scaler_dim = normalized.shape
    return normalized
def get_th(self, filename_scores_pos, filename_scores_neg):
    # Pick the ROC-optimal decision threshold (Youden's J) from two score files.
    # Each file: header row, then label in column 0 and score(s) in column 1+.
    data = tools_IO.load_mat(filename_scores_pos, numpy.chararray, '\t')[1:, :]
    labels1 = (data[:, 0]).astype('float32')
    scores1 = (data[:, 1:]).astype('float32')
    data = tools_IO.load_mat(filename_scores_neg, numpy.chararray, '\t')[1:, :]
    labels2 = (data[:, 0]).astype('float32')
    scores2 = (data[:, 1:]).astype('float32')
    labels = numpy.hstack((labels1, labels2)).astype(int)
    # NOTE(review): scores stays 2-D (N x n_cols); sklearn's roc_curve expects a
    # 1-D y_score — this only works for single-score files and on sklearn
    # versions tolerant of column vectors. Confirm before upgrading sklearn.
    scores = numpy.vstack((scores1, scores2))
    fpr, tpr, thresholds = metrics.roc_curve(labels, scores)
    # maximize TPR + (1 - FPR), i.e. Youden's J statistic
    v = numpy.argmax(tpr + (1 - fpr))
    th = thresholds[v]
    return th
def display_roc_curve_from_file(plt, fig, path_scores, caption=''):
    """Read a label/score table (label col 0, scores col 1+) and draw its ROC curve."""
    table = tools_IO.load_mat(path_scores, dtype=numpy.chararray, delim='\t')
    ground_truth = (table[:, 0]).astype('float32')
    predictions = table[:, 1:].astype('float32')
    fpr, tpr, thresholds = metrics.roc_curve(ground_truth, predictions)
    roc_auc = auc(fpr, tpr)
    plot_tp_fp(plt, fig, tpr, fpr, roc_auc, caption)
    return
def prepare_arrays_from_feature_files(self, path_input, patterns=numpy.array(['0', '1']), feature_mask='.txt', limit=1000000, has_header=True, has_labels_first_col=True):
    # Stack per-class feature files "<pattern><feature_mask>" into one dataset.
    # Returns (X float32 features, Y int32 class ids, filenames from column 0).
    # NOTE(review): has_labels_first_col is accepted but never used here.
    x = tools_IO.load_mat(path_input + ('%s%s' % (patterns[0], feature_mask)), numpy.chararray, delim='\t')
    if has_header:
        x = x[1:, :]
    # seed row/element so vstack/hstack have something to grow from; stripped below
    X = numpy.full(x.shape[1], '-').astype(numpy.chararray)
    Y = numpy.array(patterns[0])
    for i in range(0, patterns.shape[0]):
        x = tools_IO.load_mat(path_input + ('%s%s' % (patterns[i], feature_mask)), numpy.chararray, delim='\t')
        if has_header:
            x = x[1:, :]
        if (limit != 1000000) and (x.shape[0] > limit):
            # random subsample without replacement, kept in file order
            idx_limit = numpy.sort(numpy.random.choice(x.shape[0], int(limit), replace=False))
            x = x[idx_limit]
        X = numpy.vstack((X, x))
        a = numpy.full(x.shape[0], i)  # class id i for every row of this file
        Y = numpy.hstack((Y, a))
    # drop the seed row/element
    X = X[1:]
    Y = Y[1:]
    # NOTE(review): numpy.str was removed in numpy>=1.24 — this raises on modern numpy
    filenames = X[:, 0].astype(numpy.str)
    X = X[:, 1:].astype(numpy.float32)
    return (X, Y.astype(numpy.int32), filenames)
def learn_on_pos_neg_files(self, file_train_pos, file_train_neg, delimeter='\t', rand_pos=None, rand_neg=None, has_header=True, has_labels_first_col=False):
    """Train self.classifier on a positive and a negative feature file.

    Positives get label +1, negatives -1. Optional header row and label column
    are stripped before the numeric conversion.

    :param rand_pos: optional index array selecting a subset of positive rows
    :param rand_neg: optional index array selecting a subset of negative rows
    """
    # fix: numpy.str was removed in numpy>=1.24 — the builtin str is the replacement
    X_train_pos = tools_IO.load_mat(file_train_pos, numpy.chararray, delimeter).astype(str)
    X_train_neg = tools_IO.load_mat(file_train_neg, numpy.chararray, delimeter).astype(str)
    if has_header:
        X_train_pos = X_train_pos[1:, :]
        X_train_neg = X_train_neg[1:, :]
    if has_labels_first_col:
        X_train_pos = X_train_pos[:, 1:]
        X_train_neg = X_train_neg[:, 1:]
    X_train_neg = X_train_neg.astype('float32')
    X_train_pos = X_train_pos.astype('float32')
    # fix: with the None defaults the old test `rand_pos != []` was True, so
    # X_train_pos[None] silently added an axis (making the array 3-D) instead
    # of leaving the data untouched
    if rand_pos is not None and len(rand_pos) > 0:
        X_train_pos = X_train_pos[rand_pos]
    if rand_neg is not None and len(rand_neg) > 0:
        X_train_neg = X_train_neg[rand_neg]
    X_train = numpy.vstack((X_train_pos, X_train_neg))
    Y_train = numpy.hstack((numpy.full(X_train_pos.shape[0], +1), numpy.full(X_train_neg.shape[0], -1)))
    self.classifier.learn(X_train, Y_train)
    return
def get_th_pos_neg(self, filename_scores_pos, filename_scores_neg, delim='\t'):
    """Youden-optimal decision threshold from separate positive/negative score files.

    Each file has a header row; scores start at column 1. Positives are labeled 1,
    negatives 0, and the threshold maximizing TPR + (1 - FPR) is returned.
    """
    data = tools_IO.load_mat(filename_scores_pos, numpy.chararray, delim)[1:, :]  # skip header
    scores1 = (data[:, 1:]).astype('float32')
    labels1 = numpy.full(len(scores1), 1)
    data = tools_IO.load_mat(filename_scores_neg, numpy.chararray, delim)[1:, :]
    scores0 = (data[:, 1:]).astype('float32')
    # fix: negatives were labeled 1 (copy-paste) — with only one class present,
    # roc_curve degenerates and the returned threshold was meaningless
    labels0 = numpy.full(len(scores0), 0)
    labels = numpy.hstack((labels1, labels0)).astype(int)
    scores = numpy.vstack((scores1, scores0))
    fpr, tpr, thresholds = metrics.roc_curve(labels, scores)
    # maximize Youden's J = TPR - FPR
    v = numpy.argmax(tpr + (1 - fpr))
    th = thresholds[v]
    return th
def load_markers(self, filename_in, filename_marker_obj, marker_scale=None):
    """Load marker xyz positions (comma-separated) and rebuild marker objects in the VBO.

    :param filename_marker_obj: wavefront .obj used as the marker mesh
    :param marker_scale: optional override for self.marker_scale
    """
    if marker_scale is not None:
        self.marker_scale = marker_scale
    # fix: numpy.float was removed in numpy>=1.24 — the builtin float is the same dtype
    markers = tools_IO.load_mat(filename_in, dtype=float, delim=',')
    # pop previously appended marker objects until removal reports non-zero
    flag = self.my_VBO.remove_last_object()
    while flag == 0:
        flag = self.my_VBO.remove_last_object()
    for marker in markers:
        self.my_VBO.append_object(filename_marker_obj, (0.7, 0.2, 0), do_normalize_model_file=True,
                                  svec=(self.marker_scale, self.marker_scale, self.marker_scale), tvec=marker)
    self.bind_VBO()
    return
def align_two_model(filename_obj1, filename_markers1, filename_obj2, filename_markers2, filename_obj_res, filename_markers_res):
    """Affinely align model 1 onto model 2 using their marker bounding ranges.

    Per axis, markers1's [min, max] range is linearly mapped onto markers2's
    range, and the same transform is applied to model 1's vertices. Writes the
    transformed markers and mesh to the result files.
    """
    object1 = tools_wavefront.ObjLoader()
    object1.load_mesh(filename_obj1, do_autoscale=False)
    object2 = tools_wavefront.ObjLoader()
    object2.load_mesh(filename_obj2, do_autoscale=False)
    # fix: numpy.float was removed in numpy>=1.24 — the builtin float is the same dtype
    markers1 = tools_IO.load_mat(filename_markers1, dtype=float, delim=',')
    markers2 = tools_IO.load_mat(filename_markers2, dtype=float, delim=',')
    result_markers = markers1.copy()
    result_vertex = object1.coord_vert.copy()
    for dim in range(0, 3):
        min_value_s = markers1[:, dim].min()
        min_value_t = markers2[:, dim].min()
        max_value_s = markers1[:, dim].max()
        max_value_t = markers2[:, dim].max()
        # NOTE(review): degenerate marker sets (zero extent on an axis) divide by zero here
        scale = (max_value_t - min_value_t) / (max_value_s - min_value_s)
        result_markers[:, dim] = (result_markers[:, dim] - min_value_s) * scale + min_value_t
        result_vertex[:, dim] = (result_vertex[:, dim] - min_value_s) * scale + min_value_t
    tools_IO.save_mat(result_markers, filename_markers_res, delim=',')
    object1.export_mesh(filename_obj_res, X=result_vertex, idx_vertex=object1.idx_vertex)
    return
def plot_multiple_series(filename_fact, list_filenames, target_column=0,caption=''):
    # Plot the "fact" series against each predicted series, one subplot per prediction.
    fig = plt.figure(figsize=(12, 6))
    fig.subplots_adjust(hspace =0.01)
    # NOTE(review): fig.canvas.set_window_title was removed in matplotlib>=3.6
    fig.canvas.set_window_title(caption)
    S = 1+len(list_filenames)
    Labels_train = numpy.array(['fact']+[each.split('/')[-1] for each in list_filenames])
    Series = []
    mat = tools_IO.load_mat(filename_fact, dtype=numpy.float32, delim='\t')
    # 1-D file: use it directly; 2-D file: pick target_column
    if len(mat.shape) == 1:
        Series.append(mat)
    else:
        Series.append(mat[:, target_column])
    for filename in list_filenames:
        mat = tools_IO.load_mat(filename,dtype=numpy.float32,delim='\t')
        if len(mat.shape)==1:
            Series.append(mat)
        else:
            Series.append(mat[:, target_column])
    Series=numpy.array(Series).T  # shape: samples x (1 + len(list_filenames))
    SMAPE = calc_series_SMAPE(Series)
    for i in range(1, S):
        # each subplot: fact (column 0) vs prediction i
        plot_series(plt.subplot(S-1,1,i), Series[:,[0,i]],labels=Labels_train[[0,i]],SMAPE=SMAPE[[0,i]])
    plt.tight_layout()
    return
def get_th_train(self, filename_scores_train, delim='\t', has_header=True):
    """Youden-optimal decision threshold from a combined label/score file.

    Column 0 holds labels, columns 1+ hold scores; an optional header row is skipped.
    """
    # fix: the delim parameter was ignored (delimiter was hard-coded to '\t')
    X = tools_IO.load_mat(filename_scores_train, numpy.chararray, delim)
    if has_header:
        X = X[1:, :]
    else:
        X = X[:, :]
    labels = (X[:, 0]).astype('float32')
    scores = X[:, 1:].astype('float32')
    fpr, tpr, thresholds = metrics.roc_curve(labels, scores)
    # maximize TPR + (1 - FPR), i.e. Youden's J statistic
    v = numpy.argmax(tpr + (1 - fpr))
    th = thresholds[v]
    return th
def plot_learning_rates2(plt, fig, filename_mat):
    """Plot the two curves stored in columns 2 and 3 of a learning-log file.

    Row 0 is a header; cell (0, 2) supplies the plot title. Silently returns
    when the file does not exist.
    """
    if not os.path.isfile(filename_mat):
        return
    raw = tools_IO.load_mat(filename_mat, dtype=numpy.chararray, delim='\t')
    title = raw[0, 2].decode("utf-8")
    values = raw[1:, :].astype(numpy.float32)
    epochs = numpy.arange(0, values.shape[0])
    for col in (2, 3):
        plt.plot(epochs, values[:, col])
    plt.grid(which='major', color='lightgray', linestyle='--')
    fig.gca().set_title(title)
    return
def __init__(self, filename_config, filename_3dmarkers=None):
    """dlib 68-point facial-landmark detector wrapper.

    :param filename_config: path to the dlib shape-predictor model file
    :param filename_3dmarkers: optional csv overriding the built-in 3-D model points
    """
    self.name = "landmark_detector"
    # index groups within the 68-point landmark model
    self.idx_head = numpy.arange(0, 27, 1).tolist()
    self.idx_nose = numpy.arange(27, 36, 1).tolist()
    self.idx_eyes = numpy.arange(36, 48, 1).tolist()
    self.idx_mouth = numpy.arange(48, 68, 1).tolist()
    self.idx_removed_chin = numpy.arange(17, 68, 1).tolist()
    # all points except the inner-eye subset removed below
    self.idx_removed_eyes = numpy.arange(0, 68, 1).tolist()
    for each in [37, 38, 40, 41, 43, 44, 46, 47]:
        self.idx_removed_eyes.remove(each)
    self.model_68_points = self.__get_full_model_points()
    if filename_3dmarkers is not None:
        # fix: numpy.float was removed in numpy>=1.24 — the builtin float is the same dtype
        self.model_68_points = tools_IO.load_mat(filename_3dmarkers, dtype=float, delim=',')
    # pose vectors, cached between solvePnP-style calls
    self.r_vec = None
    self.t_vec = None
    self.detector = dlib.get_frontal_face_detector()
    self.predictor = dlib.shape_predictor(filename_config)
    return
def extract_ellipses(self, image, min_size=50):
    """Run the external ELSD binary on `image` and return the detected ellipses.

    Writes a temporary .pgm, invokes self.bin_name, parses the ellipse output
    file, and cleans up the temporaries.

    :param min_size: minimum distance between the two parsed endpoint pairs
    :return: list of ellipse parameter arrays (first column of each row dropped)
    """
    temp_pgm = str(uuid.uuid4())
    temp_eli = str(uuid.uuid4())
    temp_poly = str(uuid.uuid4())
    temp_svg = str(uuid.uuid4())
    cv2.imwrite(self.folder_out + temp_pgm + '.pgm', tools_image.desaturate_2d(image))
    command = [self.bin_name, self.folder_out + temp_pgm + '.pgm', self.folder_out + temp_eli + '.txt', self.folder_out + temp_poly + '.txt', self.folder_out + temp_svg + '.svg']
    subprocess.call(command, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL)
    if os.path.isfile(self.folder_out + temp_eli + '.txt'):
        # fix: numpy.float was removed in numpy>=1.24 — the builtin float is the same dtype
        ellipses = tools_IO.load_mat(self.folder_out + temp_eli + '.txt', dtype=float, delim=' ')
        if len(ellipses.shape) == 1:
            ellipses = numpy.array([ellipses])  # single detection -> keep 2-D
        ellipses = ellipses[:, 1:]
        # keep only ellipses whose endpoints are far enough apart
        ellipses = [e for e in ellipses if numpy.linalg.norm((e[0] - e[2], e[1] - e[3])) >= min_size]
    else:
        ellipses = []
    tools_IO.remove_file(self.folder_out + temp_pgm + '.pgm')
    tools_IO.remove_file(self.folder_out + temp_eli + '.txt')
    tools_IO.remove_file(self.folder_out + temp_poly + '.txt')
    #tools_IO.remove_file(self.folder_out + temp_svg + '.svg')
    return ellipses
def plot_2D_samples_from_folder(foldername, caption='', add_noice=0):
    """Scatter-plot the 2-D samples of every file in a folder, one color per file.

    Each file: label in column 0, x/y features in columns 1+. With add_noice > 0
    a small jitter is applied so coincident points stay visible.
    """
    local_filenames = fnmatch.filter(listdir(foldername), '*.*')
    fig = plt.figure()
    fig.canvas.set_window_title(caption)
    palette = list(('red', 'blue', 'green', 'orange', 'cyan', 'purple', 'black', 'gray', 'pink', 'darkblue'))
    for i, filename in enumerate(local_filenames):
        data = tools_IO.load_mat(foldername + filename, dtype=numpy.chararray, delim='\t')
        labels = (data[:, 0]).astype('float32')
        X = data[:, 1:].astype('float32')
        if add_noice > 0:
            X += 0.05 - 0.1 * numpy.random.random_sample(X.shape)
        plt.plot(X[:, 0], X[:, 1], 'ro', color=palette[i % len(palette)], alpha=0.4)
    plt.grid()
    return
def converter(filename_in, filename_out):
    # Build a Delaunay-triangulated surface from a flat file of 3-D points using
    # mayavi (mlab); the mesh-export path is commented out, so filename_out is
    # currently unused.
    data = tools_IO.load_mat(filename_in, dtype=numpy.float32)
    N = data.shape[0] // 3
    # NOTE(review): for interleaved x/y/z triples these index sets should likely
    # span the whole array (arange(0, data.shape[0], 3) etc.); as written only
    # entries up to index N+1 are used — confirm the input layout.
    ix = numpy.arange(0, N, 3)
    iy = numpy.arange(1, N + 1, 3)
    iz = numpy.arange(2, N + 2, 3)
    X, Y, Z = data[ix], data[iy], data[iz]
    xyz = numpy.vstack((numpy.vstack((X, Y)), Z)).T
    #del_triangles = Delaunay(numpy.array([X,Y,Z]).T).vertices
    pts = mlab.points3d(X, Y, Z)
    mesh = mlab.pipeline.delaunay2d(pts)
    surf = mlab.pipeline.surface(mesh)
    i = 0
    #my_write_triangle_mesh(filename_out, xyz, del_triangles)
    #for triangle in del_triangles:
    #    i0, i1, i2, i3 = triangle
    #    p0, p1, p2, p3 = xyz[i0, :], xyz[i1, :], xyz[i2, :], xyz[i3, :]
    return
def E2E_features_2_classes(self, folder_out, filename_data_pos, filename_data_neg, filename_scrs_pos=None, filename_scrs_neg=None, fig=None):
    # End-to-end 2-class evaluation with 2-fold cross-scoring:
    # train on one random half, score the other half, then swap; scores for the
    # two halves are appended into the same files. Also scores a 2-D grid for
    # decision-surface plotting. Returns (tpr, fpr, auc).
    filename_data_grid = folder_out + 'data_grid.txt'
    filename_scores_grid = folder_out + 'scores_grid.txt'
    Pos = (tools_IO.load_mat(filename_data_pos, numpy.chararray, '\t')).shape[0]
    Neg = (tools_IO.load_mat(filename_data_neg, numpy.chararray, '\t')).shape[0]
    # fixed seed keeps the train/test split reproducible
    numpy.random.seed(125)
    idx_pos_train = numpy.random.choice(Pos, int(Pos / 2), replace=False)
    idx_neg_train = numpy.random.choice(Neg, int(Neg / 2), replace=False)
    idx_pos_test = [x for x in range(0, Pos) if x not in idx_pos_train]
    idx_neg_test = [x for x in range(0, Neg) if x not in idx_neg_train]
    self.generate_data_grid(filename_data_pos, filename_data_neg, filename_data_grid)
    # fold 1: train on the train half, score the held-out half (append=0 overwrites)
    # NOTE(review): learn_on_pos_neg_files returns None, so `model` is always None
    model = self.learn_on_pos_neg_files(filename_data_pos, filename_data_neg, '\t', idx_pos_train, idx_neg_train)
    self.score_feature_file(filename_data_pos, filename_scrs=filename_scrs_pos, delimeter='\t', append=0, rand_sel=idx_pos_test)
    self.score_feature_file(filename_data_neg, filename_scrs=filename_scrs_neg, delimeter='\t', append=0, rand_sel=idx_neg_test)
    self.score_feature_file(filename_data_grid, filename_scrs=filename_scores_grid)
    # fold 2: swap the halves, append the new scores
    model = self.learn_on_pos_neg_files(filename_data_pos, filename_data_neg, '\t', idx_pos_test, idx_neg_test)
    self.score_feature_file(filename_data_pos, filename_scrs=filename_scrs_pos, delimeter='\t', append=1, rand_sel=idx_pos_train)
    self.score_feature_file(filename_data_neg, filename_scrs=filename_scrs_neg, delimeter='\t', append=1, rand_sel=idx_neg_train)
    tpr, fpr, auc = tools_IO.get_roc_data_from_scores_file_v2(filename_scrs_pos, filename_scrs_neg)
    if (fig != None):
        # 3-panel figure: decision surface | score distributions | ROC curve
        th = self.get_th(filename_scrs_pos, filename_scrs_neg)
        tools_IO.display_roc_curve_from_descriptions(plt.subplot(1, 3, 3), fig, filename_scrs_pos, filename_scrs_neg, delim='\t')
        tools_IO.display_distributions(plt.subplot(1, 3, 2), fig, filename_scrs_pos, filename_scrs_neg, delim='\t')
        tools_IO.plot_2D_scores(plt.subplot(1, 3, 1), fig, filename_data_pos, filename_data_neg, filename_data_grid, filename_scores_grid, th, noice_needed=1, caption=self.classifier.name + ' %1.2f' % auc)
        plt.tight_layout()
        plt.show()
    return tpr, fpr, auc
def export_boxes(folder_out, folder_images, folder_labels_GT, mat_proj, point_van_xy):
    # Export, per image (first 10 only): a bird's-eye-view (BEV) textured quad
    # plus one thin extruded 3-D box per GT object, merged into a single .obj.
    ObjLoader = tools_wavefront.ObjLoader()
    tools_IO.remove_files(folder_out, create=True)
    local_filenames = get_filenames(folder_images, '*.png,*.jpg')[:10]
    for index, local_filename in enumerate(local_filenames):
        base_name = local_filename.split('/')[-1].split('.')[0]
        print(base_name)
        filename_image = folder_images + local_filename
        filename_label = folder_labels_GT + base_name + '.txt'
        # NOTE(review): folder_calib is not defined in this function — presumably
        # a module-level global; filename_calib is also unused below.
        filename_calib = folder_calib + base_name + '.txt'
        image = tools_image.desaturate(cv2.imread(filename_image))
        H, W = image.shape[:2]
        target_BEV_W, target_BEV_H = 256, 256
        # inverse-perspective homography into the bird's-eye view
        h_ipersp = tools_render_CV.get_inverce_perspective_mat_v2(image, target_BEV_W, target_BEV_H, point_van_xy)
        image_BEV = cv2.warpPerspective(image, h_ipersp, (target_BEV_W, target_BEV_H), borderValue=(32, 32, 32))
        image_BEV[:2, :2] = 255  # small white corner marker
        cv2.imwrite(folder_out + base_name + '_BEV.png', image_BEV)
        ObjLoader.export_material(folder_out + base_name + '.mtl', (255, 255, 255), base_name + '_BEV.png')
        # unit quad carrying the BEV texture
        ObjLoader.export_mesh(folder_out + base_name + '_BEV.obj', numpy.array([[-1, -1, 0], [-1, +1, 0], [+1, +1, 0], [+1, -1, 0]]), idx_vertex=[[0, 1, 2], [2, 3, 0]], coord_texture=[[0, 0], [0, 1], [1, 1], [1, 0]], filename_material=base_name + '.mtl')
        # NOTE(review): numpy.str was removed in numpy>=1.24 — raises on modern numpy
        records = tools_IO.load_mat(filename_label, delim=' ', dtype=numpy.str)
        records = filter_records(records)
        colors = tools_draw_numpy.get_colors(len(records), colormap='rainbow')
        for c in range(len(records)):
            color = colors[c]
            color_hex = '%02x%02x%02x' % (color[2], color[1], color[0])  # BGR -> RGB hex
            record = records[c]
            # pad record with two dummy fields expected by get_cube_3D
            record = numpy.insert(record, 1, '0')
            record = numpy.insert(record, 1, '0')
            #points_3d = get_cube_3D(record)
            #idx_vert = [[0,1,2],[0,2,7],[3,4,5],[3,5,6],[0,1,4],[0,4,5],[2,3,6],[2,6,7],[0,5,6],[0,6,7],[1,2,3],[1,3,4]]
            #ObjLoader.export_material(folder_out + color_hex + '.mtl',color[[2,1,0]])
            #ObjLoader.export_mesh(folder_out + base_name + '_%03d.obj'%c, points_3d,idx_vertex=idx_vert,filename_material=color_hex + '.mtl')
            corners_3d = get_cube_3D(record)
            points_2d = project_2D(mat_proj, corners_3d)
            # bottom-face corners mapped into BEV pixel coordinates
            points_2d_BEV = project_2D_BEV(points_2d[[2, 3, 6, 7]], h_ipersp)
            #image_2d = tools_draw_numpy.draw_convex_hull(image_BEV, points_2d_BEV, color.tolist(), transperency=0.25)
            #cv2.imwrite(folder_out+ base_name + '_%03d.png'%c,image_2d)
            # thin extruded box in BEV-quad coordinates ([-1, 1] range, slightly negative z)
            points_3d_BEV = numpy.zeros((8, 3), dtype=numpy.float32)
            points_3d_BEV[:4, :2] = points_2d_BEV / 128 - 1
            points_3d_BEV[4:, :2] = points_2d_BEV / 128 - 1
            points_3d_BEV[:4, 2] = -0.01
            points_3d_BEV[4:, 2] = -0.05
            idx_vert = [[0, 1, 2], [2, 3, 0], [4, 5, 6], [6, 7, 4], [0, 1, 5], [4, 5, 0], [2, 3, 6], [6, 7, 3], [0, 3, 4], [7, 4, 3], [1, 2, 5], [6, 2, 5]]
            ObjLoader.export_material(folder_out + color_hex + '.mtl', color[[2, 1, 0]])
            ObjLoader.export_mesh(folder_out + base_name + '_%03d.obj' % c, points_3d_BEV, idx_vertex=idx_vert, filename_material=color_hex + '.mtl')
        # merge all per-object .obj parts into one file, deleting the parts
        obj_filenames = numpy.sort(get_filenames(folder_out, '%s_*.obj' % base_name))
        with open(folder_out + base_name + '.obj', 'w') as g:
            for obj_filename in obj_filenames:
                with open(folder_out + obj_filename, 'r') as f:
                    g.writelines('#-------------------------------------\n')
                    for each in f.readlines():
                        g.writelines(each)
                os.remove(folder_out + obj_filename)
    return
def draw_boxes(folder_out, folder_images, folder_labels_GT, mat_proj, point_van_xy):
    # Render GT 3-D boxes (or 2-D bboxes) on each image and a bird's-eye-view
    # panel side by side; processes the first 10 images only.
    draw_cuboids = True
    tools_IO.remove_files(folder_out, create=True)
    local_filenames = get_filenames(folder_images, '*.png,*.jpg')[:10]
    for index, local_filename in enumerate(local_filenames):
        base_name = local_filename.split('/')[-1].split('.')[0]
        print(base_name)
        filename_image = folder_images + local_filename
        filename_label = folder_labels_GT + base_name + '.txt'
        image = tools_image.desaturate(cv2.imread(filename_image))
        H, W = image.shape[:2]
        target_BEV_W, target_BEV_H = int(H * 0.75), H
        if not os.path.exists(filename_label):
            continue
        # NOTE(review): numpy.str was removed in numpy>=1.24 — raises on modern numpy
        records = tools_IO.load_mat(filename_label, delim=' ', dtype=numpy.str)
        records = filter_records(records)
        colors = tools_draw_numpy.get_colors(len(records), colormap='rainbow')
        image_2d = image.copy()
        # inverse-perspective homography into the bird's-eye view
        h_ipersp = tools_render_CV.get_inverce_perspective_mat_v2(image, target_BEV_W, target_BEV_H, point_van_xy, 20, 2, 2)
        image_BEV = cv2.warpPerspective(image, h_ipersp, (target_BEV_W, target_BEV_H), borderValue=(32, 32, 32))
        image_BEV = draw_grid(image_BEV, 20, 20, transp=0.9)
        for record, color in zip(records, colors):
            # pad record with two dummy fields expected by get_cube_3D / get_bbox
            record = numpy.insert(record, 1, '0')
            record = numpy.insert(record, 1, '0')
            if draw_cuboids:
                points_2d = project_2D(mat_proj, get_cube_3D(record))
                # skip boxes projecting outside the image
                # NOTE(review): the second test repeats .min() — .max() > W looks intended; confirm
                if (numpy.array(points_2d).min() < 0) or (numpy.array(points_2d).min() > W):
                    continue
                lines_2d = numpy.array(points_cuboid_to_lines(points_2d))
                # bottom-face corners mapped into BEV pixel coordinates
                points_2d_BEV = project_2D_BEV(points_2d[[2, 3, 6, 7]], h_ipersp)
                image_BEV = tools_draw_numpy.draw_contours(image_BEV, points_2d_BEV, color_fill=color.tolist(), color_outline=color.tolist(), transp_fill=0.25, transp_outline=0.75)
            else:
                points_2d = get_bbox(record)
                lines_2d = points_bbox_to_lines(points_2d)
                center_2d_BEV = default_2D_BEV(points_2d, h_ipersp)[0]
                image_BEV = tools_draw_numpy.draw_ellipse(image_BEV, (center_2d_BEV[0] - 10, center_2d_BEV[1] - 10, center_2d_BEV[0] + 10, center_2d_BEV[1] + 10), color.tolist(), transperency=0.75)
                #image_2d = tools_draw_numpy.draw_ellipse(image_2d,(center_2d_BEV[0]-10, center_2d_BEV[1]-10, center_2d_BEV[0]+10,center_2d_BEV[1]+10),color.tolist(),transperency=0.75)
            image_2d = tools_draw_numpy.draw_convex_hull(image_2d, points_2d, color.tolist(), transperency=0.75)
            image_2d = tools_draw_numpy.draw_lines(image_2d, lines_2d, color.tolist(), w=1)
        # side-by-side composite: annotated image | BEV panel
        image_result = numpy.zeros((H, W + target_BEV_W, 3), dtype=numpy.uint8)
        image_result[:, :W] = image_2d
        image_result[:, W:] = image_BEV
        cv2.imwrite(folder_out + base_name + '.png', image_result)
    return
def plot_2D_scores(plt,fig,filename_data_pos,filename_data_neg,filename_data_grid,filename_scores_grid,th,noice_needed=0,caption='',filename_out=None):
    # Draw a 2-D decision-surface contour (grid scores) with positive and
    # negative samples scattered on top.
    # NOTE(review): `plt` is used like an Axes here (set_title / set_xticks)
    # despite the parameter name — callers pass plt.subplot(...).
    if not os.path.isfile(filename_data_pos): return
    if not os.path.isfile(filename_data_neg): return
    if not os.path.isfile(filename_data_grid): return
    data = tools_IO.load_mat(filename_scores_grid, dtype=numpy.chararray, delim='\t')
    data = data[1:,:]  # drop header row
    grid_scores = data[:, 1:].astype('float32')
    data = tools_IO.load_mat(filename_data_grid, dtype=numpy.chararray, delim='\t')
    data_grid = data[:,1:].astype('float32')
    data = tools_IO.load_mat(filename_data_pos, dtype=numpy.chararray, delim='\t')
    l1 = (data[:, 0]).astype('float32')
    x1 = data[:,1:].astype('float32')
    data = tools_IO.load_mat(filename_data_neg, dtype=numpy.chararray, delim='\t')
    l2 = (data[:, 0]).astype('float32')
    x2 = data[:,1:].astype('float32')
    X = numpy.vstack((x1,x2))
    labels = numpy.hstack((l1, l2)).astype(int)
    X1 = X[labels > 0]   # positives
    X0 = X[labels <= 0]  # negatives
    #'''
    # normalize grid scores to roughly [-1, 1] around the decision threshold th
    # (note: `max`/`min` shadow the builtins within this function)
    max = numpy.max(grid_scores)
    min = numpy.min(grid_scores)
    for i in range(0,grid_scores.shape[0]):
        if(grid_scores[i]>th):
            grid_scores[i]=(grid_scores[i]-th)/(max-th)
        else:
            grid_scores[i] = (grid_scores[i] - th) / (th-min)
    #'''
    S=int(math.sqrt(grid_scores.shape[0]))  # the grid is assumed square
    grid_scores=numpy.reshape(grid_scores,(S,S))
    minx=numpy.min(data_grid[:, 0])
    maxx=numpy.max(data_grid[:, 0])
    miny=numpy.min(data_grid[:, 1])
    maxy=numpy.max(data_grid[:, 1])
    if noice_needed>0:
        # jitter samples so coincident points stay visible
        noice1 = 0.05-0.2*numpy.random.random_sample(X1.shape)
        noice0 = 0.05-0.2*numpy.random.random_sample(X0.shape)
        X1+=noice1
        X0+=noice0
    plt.set_title(caption)
    xx, yy = numpy.meshgrid(numpy.linspace(minx, maxx, num=S), numpy.linspace(miny, maxy,num=S))
    # flip rows: grid file stores larger y first (see generate_data_grid layout)
    plt.contourf(xx, yy, numpy.flip(grid_scores,0), cmap=cm.coolwarm, alpha=.8)
    #plt.imshow(grid_scores, interpolation='bicubic',cmap=cm.coolwarm,extent=[minx,maxx,miny,maxy],aspect='auto')
    plt.plot(X0[:, 0], X0[:, 1], 'ro', color='blue', alpha=0.4)
    plt.plot(X1[: ,0], X1[:, 1], 'ro' ,color='red' , alpha=0.4)
    plt.grid()
    plt.set_xticks(())
    plt.set_yticks(())
    #fig.subplots_adjust(hspace=0.001,wspace =0.001)
    if filename_out is not None:
        # NOTE(review): an Axes has no savefig — fig.savefig was likely intended; confirm
        plt.savefig(filename_out)
    return