def analyze_super_pixels(self, sp_path, safe_path, experiment):
    '''
    analyzes super pixels at given path
    @param sp_path: path to superpixels
    @param safe_path: path to store results
    @param experiment: experiment name
    '''
    if os.path.isfile(safe_path + "/Statistics_" + experiment + ".csv"):
        print "Skipping {0} Analysis - it already exists".format(experiment)
        return
    self.log.start("Read SuperPixels from Experiment: {0}"
                   .format(experiment), 1, 1)
    path_list = path_to_subfolder_pathlist(sp_path, filter=".mat")
    sp_name_list = np.array([os.path.splitext(basename(filepath))[0]
                             for filepath in path_list])
    sp_array_list = np.array([read_arr_from_matfile(filepath, 'superPixels')
                              for filepath in path_list])
    self.log.update()

    self.log.start("Analyze SuperPixels from Experiment: {0}"
                   .format(experiment), 1, 1)
    # number of distinct superpixel labels per image
    num_sp = np.array([len(np.unique(sp)) for sp in sp_array_list])
    num_sp_arg = np.argsort(num_sp)

    # plain-text summary: max / min / mean superpixel count
    stats_sum_file = open(safe_path + '/Stats_Sum_' + experiment + '.txt', 'a')
    s = "##################### Statistics for Experiment " + \
        experiment + " #####################\n\n" + \
        "Max SP Num: \t{0}\n".format(num_sp[num_sp_arg[-1]]) + \
        "Min SP Num: \t{0}\n".format(num_sp[num_sp_arg[0]]) + \
        "Mean SP Num: \t{0}\n".format(np.sum(num_sp) * 1.0 / len(num_sp))
    stats_sum_file.write(s)
    stats_sum_file.close()

    # per-image CSV: one row per image with its superpixel count
    csv.register_dialect("tab", delimiter="\t", quoting=csv.QUOTE_ALL)
    writer = csv.DictWriter(
        open(safe_path + "/Statistics_" + experiment + ".csv", "wb"),
        ["Image", "#SuperPixels"], dialect='excel-tab')
    writer.writerow({"Image": "Image", "#SuperPixels": "#SuperPixels"})
    data = []  # was [{}], which wrote a spurious empty first row
    for i in range(len(num_sp)):
        data.append({"Image": sp_name_list[i], "#SuperPixels": num_sp[i]})
    writer.writerows(data)
    self.log.update()
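# Hedged usage sketch (not part of the original module): analyze_super_pixels is an
# instance method and assumes its class provides a Logger instance as self.log; the
# class name "Analyzer" and the paths below are placeholders.
#
#   analyzer = Analyzer()
#   analyzer.analyze_super_pixels(
#       sp_path="EAW_1/Data/Descriptors/SP_Desc_k200/super_pixels",
#       safe_path="Results",
#       experiment="EAW_1")
#   # -> Results/Stats_Sum_EAW_1.txt and Results/Statistics_EAW_1.csv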
def relabel(global_path):
    '''
    relabels EAW superpixels such that the array contains #unique complete clusters
    @param global_path: path to EAW superpixels
    '''
    path_list = utils.path_to_subfolder_pathlist(global_path, filter=".mat")
    relabeled_num = 0
    print path_list
    log = utils.Logger(verbose=True)
    log.start("Relabeling EAW_SuperPixels", len(path_list), 1)
    for path in path_list:
        folder = basename(os.path.abspath(os.path.join(path, '..', '..')))
        target_folder = os.path.abspath(
            os.path.join(path, '..', '..', '..', 'relabeled', folder,
                         basename(os.path.abspath(os.path.join(path, '..')))))
        if not os.path.isdir(target_folder):
            os.makedirs(target_folder)
        # target_folder is already absolute, so the file goes directly below it
        target_path = os.path.join(target_folder, basename(path))
        # skip files that were already relabeled in an earlier run
        if os.path.isfile(target_path):
            log.update()
            continue
        # the original `if (True): ... else: ...` scaffold is replaced by a
        # try/except so the failure branch (and its bare `raise`) is reachable
        try:
            arr = utils.read_arr_from_matfile(path, "ind")
            relabel_arr(arr)
            utils.write_arr_to_matfile(arr, target_path, "superPixels")
            relabeled_num += 1
            print "Wrote ... {0}".format(basename(path))
        except Exception:
            print 'Failure in {0}'.format(folder)
            print 'Failure in image {0}'.format(basename(target_path))
            raise
        # debugging guard kept from the original: abort on these source folders
        if folder in ['index_4', 'index_5', 'index_6', 'index_7', 'index_8']:
            print 'Failure in {0}'.format(folder)
            print 'Failure in image {0}'.format(basename(target_path))
            raise RuntimeError('unexpected folder {0}'.format(folder))
        log.update()
    print "Relabeled files: {0}/{1}".format(relabeled_num, len(path_list))
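# Hedged usage sketch (placeholder path; assumes the EAW .mat files sit two directory
# levels below global_path, as the '..'/'..' lookups in relabel() expect, with the raw
# cluster indices stored under the key "ind"):
#
#   relabel("/data/EAW")
#   # relabeled copies are written next to the source tree under .../relabeled/<folder>/...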
def run(path, segments):
    '''
    reads saliency files and converts them into heatmap images
    existing images under path/output/ are only used for a size check;
    the rendered heatmaps are written to path/output2/
    @param path: path to saliency main folder
    @param segments: folder name to saliency files
    '''
    spatial = utils.path_to_subfolder_pathlist(path + '/' + segments, filter=".mat")
    if not os.path.exists('output'):
        os.makedirs('output')
    for i in range(len(spatial)):
        mat = utils.read_arr_from_matfile(spatial[i], "S")
        (x1, y1) = mat.shape
        out_name = os.path.splitext(basename(spatial[i]))[0] + '.jpg'
        folder = basename(dirname(spatial[i]))
        # skip images that were already rendered with the correct size
        old_path = path + '/' + 'output/' + folder + '/' + out_name
        if os.path.exists(old_path):
            im = np.array(Image.open(old_path))
            (x, y, _) = im.shape
            if (x == x1) & (y == y1):
                continue
        # map the label values to consecutive indices 0..len(u)-1
        b = np.zeros(mat.shape)
        u = np.unique(mat)
        for j in range(len(u)):
            b[mat == u[j]] = j
        # render without axes; the figure size is derived from the array shape
        fig = plt.figure(frameon=False)
        fig.set_size_inches((mat.shape[1] + 0.5) * 1.0 / 100,
                            (mat.shape[0] + 0.5) * 1.0 / 100)
        ax = plt.Axes(fig, [0., 0., 1., 1.])
        ax.set_axis_off()
        fig.add_axes(ax)
        ax.imshow(b, aspect='normal')
        if not os.path.exists(path + '/' + 'output2/' + folder):
            os.makedirs(path + '/' + 'output2/' + folder)
        fig.savefig(path + '/' + 'output2/' + folder + '/' + out_name)
        # report the size difference between the array and the written image
        im2 = np.array(Image.open(path + '/' + 'output2/' + folder + '/' + out_name))
        (x2, y2, _) = im2.shape
        print '({0},{1} -- {2})'.format(x1 - x2, y1 - y2, basename(spatial[i]))
        plt.close(fig)
    print 'done'
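# Minimal numpy sketch of the relabel-to-consecutive-indices step used in run(),
# which maps arbitrary label values to 0..len(u)-1 before rendering the heatmap:
#
#   import numpy as np
#   mat = np.array([[7, 7, 42], [42, 3, 3]])
#   u = np.unique(mat)                  # [3, 7, 42]
#   b = np.zeros(mat.shape)
#   for j in range(len(u)):
#       b[mat == u[j]] = j
#   # b == [[1, 1, 2], [2, 0, 0]]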
def segment(in_folder, output_folder):
    '''
    takes salience segments from in_folder and saves black and white images
    to output_folder
    '''
    sal_path_list = utils.path_to_subfolder_pathlist(in_folder, filter=".mat")
    s = [utils.read_arr_from_matfile(sal_path_list[i], "S")
         for i in range(len(sal_path_list))]
    for i in range(len(s)):
        print "{0}/{1}".format(i + 1, len(s))
        # all-white RGB image, scaled per channel by the saliency map S
        b = np.ones([s[i].shape[0], s[i].shape[1], 3], dtype=np.uint8)
        b = b * 255
        b[:, :, 0] = b[:, :, 0] * s[i]
        b[:, :, 1] = b[:, :, 1] * s[i]
        b[:, :, 2] = b[:, :, 2] * s[i]
        fig = plt.figure(frameon=False)
        # was mat.shape (undefined here); the saliency array itself defines the size
        fig.set_size_inches((s[i].shape[1] + 0.5) * 1.0 / 100,
                            (s[i].shape[0] + 0.5) * 1.0 / 100)
        ax = plt.Axes(fig, [0., 0., 1., 1.])
        ax.set_axis_off()
        fig.add_axes(ax)
        ax.imshow(b, aspect='normal')
        fig.savefig(output_folder + '/' +
                    os.path.splitext(basename(sal_path_list[i]))[0] + '.jpg')
        plt.close(fig)
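# Minimal numpy sketch of the per-channel scaling in segment() (assumption: the "S"
# arrays are binary 0/1 saliency masks, which is what makes the output black/white):
#
#   import numpy as np
#   S = np.array([[0, 1], [1, 0]])                # toy 2x2 saliency mask
#   b = np.ones((2, 2, 3), dtype=np.uint8) * 255  # all-white RGB image
#   for c in range(3):
#       b[:, :, c] = b[:, :, c] * S               # salient pixels stay white
#   # b[:, :, 0] is now [[0, 255], [255, 0]]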
def eaw_val1(path, eaw_path, fl=[0, 1, 2], eaw=True):
    '''
    before using eaw_val2, use this method; its output is the input for eaw_val2

    returns: path to the experiments folder, eaw_path to the scaling functions,
    folder_list of the used scaling functions, prob_path to the labeling
    probability folders, prob_paths to the per-image labeling probability files,
    and weight_map with the scaling-function weights defined by the superpixels
    @param path: path to experiments folder
    @param eaw_path: path to scaling functions
    @param fl: index list of the scaling-function folders to use
    @param eaw: if False, uniform weights are used instead of the EAW scaling functions
    '''
    if eaw:
        folder_list = np.array(['EAW_1', 'EAW_2', 'EAW_3', 'EAW_4'])
    else:
        folder_list = np.array([eaw_path])
    eaw_folder = np.array(['level_summed1', 'level_summed2',
                           'level_summed3', 'level_summed4'])
    folder_list = folder_list[fl]
    eaw_folder = eaw_folder[fl]
    log = Logger(verbose=True)
    # path to class probabilities
    prob_path = [path + '/' + folder_list[j] +
                 '/Data/Base/MRF/SemanticLabels/R200K200TNN80-SPscGistCoHist-sc01ratio C00 B.5.1 S0.000 IS0.000 Pcon IPpot Seg WbS1'
                 for j in range(len(folder_list))]
    prob_paths = [[p for p in path_to_subfolder_pathlist(prob_path[j], filter='.mat')]
                  for j in range(len(folder_list))]
    weight_map = {}
    for i in range(len(folder_list)):
        weight_map[i] = {}
    log.start("Labeling EAW-Results", len(prob_paths[0]), 1)
    # run over all test images
    for j in range(len(prob_paths[0])):
        # run over all experiments (eaw_x ... eaw_y)
        for i in range(len(folder_list)):
            sp_path = (path + '/' + folder_list[i] +
                       '/Data/Descriptors/SP_Desc_k200/super_pixels')
            sp = read_arr_from_matfile(
                sp_path + '/' +
                os.path.basename(os.path.dirname(prob_paths[i][j])) + '/' +
                os.path.basename(prob_paths[i][j]), 'superPixels')
            if eaw:
                weight_map[i][j] = read_arr_from_matfile(
                    eaw_path + '/' + eaw_folder[i] + '/' +
                    os.path.basename(os.path.dirname(prob_paths[i][j])) + '/' +
                    os.path.basename(prob_paths[i][j]), 'im')
            else:
                weight_map[i][j] = np.ones(sp.shape)
        log.update()
    return path, eaw_path, folder_list, prob_path, prob_paths, weight_map
def eaw_val2(input, method, f, bias):
    '''
    use output from eaw_val1 as input for eaw_val2
    validates weighting of labeling results by scaling functions
    weighting: method(weights, f[i]*(bias[i]+i)) ... i indicates the level of the
    scaling function; method can be 0 for exp, 1 for mult
    @param input: output of eaw_val1: path, eaw_path, folder_list, prob_path,
                  prob_paths, weight_map
    @param method: 0 or 1. 0: exponential weighting, 1: multiplicative weighting
    @param f: array of weighting values (a scalar is broadcast to all levels)
    @param bias: array of bias values to normalize the scaling indices i (e.g. to 1)
    '''
    path = input[0]
    folder_list = input[2]
    prob_path = input[3]
    prob_paths = input[4]
    weight_map = input[5]
    # broadcast scalar f / bias to one value per scaling level
    try:
        f[1]
    except:
        l = len(weight_map.keys())
        val = f
        f = np.zeros((l, 1))
        for i in range(l):
            f[i] = val
    try:
        bias[1]
    except:
        l = len(weight_map.keys())
        val = bias
        bias = np.zeros((l, 1))
        for i in range(l):
            bias[i] = val
    # stack the per-level weight maps into one (H, W, levels) array per image
    weights = {}
    for j in range(len(prob_paths[0])):
        weights[j] = np.zeros((weight_map[0][j].shape[0],
                               weight_map[0][j].shape[1],
                               len(folder_list)))
    for j in range(len(prob_paths[0])):
        for i in weight_map.keys():
            weights[j][:, :, i] = weight_map[i][j]
    # apply the level weighting: exponential (method 0) or multiplicative (method 1)
    for j in weights.keys():
        for i in range(weights[j].shape[2]):
            if method == 0:
                weights[j][:, :, i] = weights[j][:, :, i]**(f[i] * (bias[i] + i))
            elif method == 1:
                weights[j][:, :, i] = weights[j][:, :, i] * (f[i] * (bias[i] + i))
    # per pixel, take the label of the level with the largest weight
    final_labels = {}
    for j in range(len(prob_paths[0])):
        ind = np.argmax(weights[j], axis=2)
        final_labels[j] = np.zeros((weights[j].shape[0], weights[j].shape[1]))
        # reading the labels calculated by SuperParsing
        for i in range(len(folder_list)):
            final_labels[j][ind == i] = read_arr_from_matfile(prob_paths[i][j], 'L')[ind == i]
    label_true = np.zeros((len(final_labels.keys()), len(object_labels_barcelona[1])))
    label_num = np.zeros((len(final_labels.keys()), len(object_labels_barcelona[1])))
    l_path = [path + '/' + folder_list[j] + '/SemanticLabels'
              for j in range(len(folder_list))]
    print '########################################'
    for i in final_labels.keys():
        or_labs = read_arr_from_matfile(
            l_path[0] + '/' +
            os.path.basename(os.path.dirname(prob_paths[0][i])) + '/' +
            os.path.basename(prob_paths[0][i]), 'S')
        u = np.unique(or_labs)
        for l in u:
            if l > 0:
                mask = or_labs == l
                mask = mask[0:final_labels[i].shape[0], 0:final_labels[i].shape[1]]
                # correctly labeled pixels
                label_true[i][l - 1] += len(final_labels[i][mask][final_labels[i][mask] == l].flatten())
                # total pixels carrying this ground-truth label
                label_num[i][l - 1] += len(mask[mask].flatten())
    return val_split(path, label_true, label_num)
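# Hedged end-to-end sketch (not from the original module): eaw_val1 collects the
# per-level weight maps, eaw_val2 then weights them and scores the resulting labeling.
# The paths below are placeholders; method=0 selects the exponential weighting
# w_i ** (f[i] * (bias[i] + i)), method=1 the multiplicative one, and scalar
# f / bias values are broadcast to one value per selected level.
#
#   inp = eaw_val1("/experiments/barcelona",        # experiments folder
#                  "/experiments/eaw_scaling",      # scaling functions
#                  fl=[0, 1, 2], eaw=True)
#   result = eaw_val2(inp, method=0, f=1.0, bias=1.0)   # whatever val_split returns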
def eaw_val1(path, eaw_path, fl=[0, 1, 2], eaw=True):
    '''
    summarize ResultMRF from given experiments folder (path)
    GeoLabels variant of eaw_val1; see the SemanticLabels version above for the
    meaning of the parameters and return values
    '''
    if eaw:
        folder_list = np.array(['EAW_1', 'EAW_2', 'EAW_3', 'EAW_4'])
    else:
        folder_list = np.array([eaw_path])
    eaw_folder = np.array(['level_summed1', 'level_summed2',
                           'level_summed3', 'level_summed4'])
    folder_list = folder_list[fl]
    eaw_folder = eaw_folder[fl]
    log = Logger(verbose=True)
    # path to class probabilities
    prob_path = [path + '/' + folder_list[j] +
                 '/Data/Base/MRF/GeoLabels/R200K200TNN80-SPscGistCoHist-sc01ratio C00 B.5.1 S0.000 IS0.000 Pcon IPpot Seg WbS1'
                 for j in range(len(folder_list))]
    prob_paths = [[p for p in path_to_subfolder_pathlist(prob_path[j], filter='.mat')]
                  for j in range(len(folder_list))]
    weight_map = {}
    for i in range(len(folder_list)):
        weight_map[i] = {}
    log.start("Labeling EAW-Results", len(prob_paths[0]), 1)
    # run over all test images
    for j in range(len(prob_paths[0])):
        # run over all experiments (eaw_x ... eaw_y)
        for i in range(len(folder_list)):
            sp_path = (path + '/' + folder_list[i] +
                       '/Data/Descriptors/SP_Desc_k200/super_pixels')
            sp = read_arr_from_matfile(
                sp_path + '/' +
                os.path.basename(os.path.dirname(prob_paths[i][j])) + '/' +
                os.path.basename(prob_paths[i][j]), 'superPixels')
            if eaw:
                # per-level scaling-function weights, already summed per level
                weight_map[i][j] = read_arr_from_matfile(
                    eaw_path + '/' + eaw_folder[i] + '/' +
                    os.path.basename(os.path.dirname(prob_paths[i][j])) + '/' +
                    os.path.basename(prob_paths[i][j]), 'im')
            else:
                weight_map[i][j] = np.ones(sp.shape)
            # (the original also carried a disabled variant that assembled the
            # weight map superpixel-by-superpixel from individual scaling
            # functions, as done in the per-level eaw_val1 below)
        log.update()
    return path, eaw_path, folder_list, prob_path, prob_paths, weight_map
def eaw_val2(input, method, f, bias):
    '''
    GeoLabels variant of eaw_val2; method can be 0 for exp, 1 for mult
    '''
    path = input[0]
    eaw_path = input[1]
    folder_list = input[2]
    prob_path = input[3]
    prob_paths = input[4]
    weight_map = input[5]
    # broadcast scalar f / bias to one value per scaling level
    try:
        f[1]
    except:
        l = len(weight_map.keys())
        val = f
        f = np.zeros((l, 1))
        for i in range(l):
            f[i] = val
    try:
        bias[1]
    except:
        l = len(weight_map.keys())
        val = bias
        bias = np.zeros((l, 1))
        for i in range(l):
            bias[i] = val
    # stack the per-level weight maps into one (H, W, levels) array per image
    weights = {}
    for j in range(len(prob_paths[0])):
        weights[j] = np.zeros((weight_map[0][j].shape[0],
                               weight_map[0][j].shape[1],
                               len(folder_list)))
    for j in range(len(prob_paths[0])):
        for i in weight_map.keys():
            weights[j][:, :, i] = weight_map[i][j]
    # apply the level weighting: exponential (method 0) or multiplicative (method 1)
    for j in weights.keys():
        for i in range(weights[j].shape[2]):
            if method == 0:
                weights[j][:, :, i] = weights[j][:, :, i]**(f[i] * (bias[i] + i))
            elif method == 1:
                weights[j][:, :, i] = weights[j][:, :, i] * (f[i] * (bias[i] + i))
    # per pixel, take the label of the level with the largest weight
    final_labels = {}
    for j in range(len(prob_paths[0])):
        ind = np.argmax(weights[j], axis=2)
        final_labels[j] = np.zeros((weights[j].shape[0], weights[j].shape[1]))
        # reading the labels calculated by SuperParsing
        for i in range(len(folder_list)):
            final_labels[j][ind == i] = read_arr_from_matfile(prob_paths[i][j], 'L')[ind == i]
    label_true = np.zeros((len(final_labels.keys()), len(object_labels_barcelona[1])))
    label_num = np.zeros((len(final_labels.keys()), len(object_labels_barcelona[1])))
    l_path = [path + '/' + folder_list[j] + '/GeoLabels'
              for j in range(len(folder_list))]
    print '########################################'
    for i in final_labels.keys():
        or_labs = read_arr_from_matfile(
            l_path[0] + '/' +
            os.path.basename(os.path.dirname(prob_paths[0][i])) + '/' +
            os.path.basename(prob_paths[0][i]), 'S')
        u = np.unique(or_labs)
        for l in u:
            if l > 0:
                mask = or_labs == l
                mask = mask[0:final_labels[i].shape[0], 0:final_labels[i].shape[1]]
                # correctly labeled pixels
                label_true[i][l - 1] += len(final_labels[i][mask][final_labels[i][mask] == l].flatten())
                # total pixels carrying this ground-truth label
                label_num[i][l - 1] += len(mask[mask].flatten())
    return val_split(path, label_true, label_num)
def eaw_val1(path, eaw_path, fl=[0, 1, 2, 3]):  # was [0, 1, 2, 4], out of range for four levels
    '''
    summarize ResultMRF from given experiments folder (path)
    per-level variant: the weight map is assembled superpixel-by-superpixel from
    the individual EAW scaling functions
    '''
    folder_list = np.array(['eaw_1', 'eaw_2', 'eaw_3', 'eaw_4'])
    eaw_folder = np.array(['level_1', 'level_2', 'level_3', 'level_4'])
    sp_folder = np.array(['index_1', 'index_2', 'index_3', 'index_4'])
    eaw_sp = np.array([4, 16, 64, 256])
    folder_list = folder_list[fl]
    eaw_folder = eaw_folder[fl]
    sp_folder = sp_folder[fl]
    eaw_sp = eaw_sp[fl]
    log = Logger(verbose=True)
    # path to class probabilities
    prob_path = [path + '/' + folder_list[j] +
                 '/Data/Base/MRF/SemanticLabels/R200K200TNN80-SPscGistCoHist-sc01ratio C00 B.5.1 S0.000 IS0.000 Pcon IPpot Seg WbS1'
                 for j in range(len(folder_list))]
    prob_paths = [[p for p in os.listdir(prob_path[j]) if 'cache' not in p]
                  for j in range(len(folder_list))]
    eaw_p = [eaw_path + '/' + eaw_folder[j] for j in range(len(folder_list))]
    weight_map = {}
    for i in range(len(folder_list)):
        weight_map[i] = {}
    log.start("Labeling EAW-Results", len(prob_paths[0]), 1)
    # run over all test images
    for j in range(len(prob_paths[0])):
        # run over all levels
        for i in range(len(folder_list)):
            sp_path = eaw_path + '/relabeled/' + sp_folder[i]
            sp = read_arr_from_matfile(sp_path + '/' + prob_paths[i][j], 'superPixels')
            weight_map[i][j] = np.zeros(sp.shape)
            # load the edge-avoiding wavelet scaling functions, one per superpixel
            eaw = [read_arr_from_matfile(eaw_p[i] + '/' +
                                         os.path.splitext(prob_paths[i][j])[0] +
                                         '_k_' + str(l) + '.mat', 'im')
                   for l in (np.array(range(eaw_sp[i])) + 1)]
            # copy the scaling function of superpixel u into u's pixels
            for u in np.unique(sp):
                weight_map[i][j][sp == u] = eaw[u - 1][sp == u]
        log.update()
    return path, eaw_path, folder_list, prob_path, prob_paths, weight_map
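# Hedged layout note (reconstructed from the path handling in the function above,
# example path is a placeholder): for a test image <name>.mat listed under
# prob_path, this per-level variant expects
#   <eaw_path>/relabeled/index_<k>/<name>.mat   key 'superPixels'
#   <eaw_path>/level_<k>/<name>_k_<u>.mat       key 'im', one file per superpixel u
# so that weight_map[i][j][sp == u] receives the scaling function of superpixel u.
#
#   out = eaw_val1("/experiments/barcelona", "/experiments/EAW")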