def db_generate_heatmap(res_x, res_y, height, width, url):
    pts = []
    page = Page.objects.get(url=url, res_x=int(res_x), res_y=int(res_y),
                            width=int(width), height=int(height))
    clicks = ClickItem.objects.all().filter(page=page)
    for item in clicks:
        pts.append((item.x, page.height - item.y))
    hm = heatmap.Heatmap()
    img = hm.heatmap(pts, area=((0, 0), (page.width, page.height)),
                     size=(page.width, page.height), dotsize=50)
    # save file to media
    url_id = base64.urlsafe_b64encode(page.url)
    output = StringIO.StringIO()
    img.save(output, 'PNG')
    f1 = ContentFile(output.getvalue())
    default_storage.save('%s.png' % (url_id), f1)
    output.close()
    #img.save('%s@%dx%d.png' % (url, int(res_x), int(res_y)))
    #img.save('cas.png')
    #response = HttpResponse(mimetype="image/png")
    #img.save(response, 'PNG')
    return
def plot_abun():
    content = request.get_json(force=True)
    feature_table = content['feature_table']
    log_flag = content['log_flag']
    abun_type = content['abun_type']
    # abun
    abun_div_and_dict = stat_abundance.plot_stat_abun(feature_table, abun_type, log_flag)
    abun_div = abun_div_and_dict[0]
    cols = [ele for ele in abun_div_and_dict[1]]
    # heatmap
    metadata = content['metadata']
    feature_table = content['feature_table']
    features = [content['feature0'], content['feature1'], content['feature2']]
    heatmap_instance = heatmap.Heatmap(metadata, feature_table)
    heatmap_instance.map()
    heatmap_instance.sort_by_features(features[0], features[1], features[2])
    heatmap_instance.obtain_numerical_matrix(cols)
    heatmap_div = heatmap_instance.plotly_div()
    # annotation
    taxo_file = content['taxonomy_file']
    ann = annotation.Annotation(cols, feature_table, taxo_file)
    ann_div = ann.plot_annotation()
    result = {0: abun_div, 1: ann_div, 2: heatmap_div}
    return jsonify(result)
def tokenheatmap(test_num, seed):
    hm = heatmap.Heatmap()
    tokenList = []
    for x in xrange(0, test_num):
        signal.alarm(20)
        tokenList += graptokens(seed)
    hm.heatmap(tokenList, 50, 120, (2048, 2048), 'classic',
               ((0, 0), (300, 300))).save("token_heat_map.png")
def returnHeatmap(self, fromTick=0, toTick=-1):
    pts = self.returnPts(fromTick, toTick)
    if len(pts) == 0:
        return
    else:
        hm = heatmap.Heatmap()
        img = hm.heatmap(pts, dotsize=20, size=(512, 512),
                         area=((0, 0), (self.luaScenarioInfo["size"][1.0],
                                        self.luaScenarioInfo["size"][2.0])))
        return img.tostring("raw", "BGRA", 0, -1)
def create_heatmap(data, bkgrnd, output):
    """Plot data as a heatmap pasted over a background image."""
    hm = heatmap.Heatmap()
    fg = hm.heatmap(data, dotsize=25, size=dim, area=((0, 0), dim))
    bg = Image.open(bkgrnd + ".png")
    bg.paste(fg, (0, 0), fg)
    bg.save("img/" + output + "_heatmap.png")
def use_heatmap(image, box_centers):
    import heatmap
    hm = heatmap.Heatmap()
    box_centers = [(i, image.shape[0] - j) for i, j in box_centers]
    img = hm.heatmap(box_centers, dotsize=200,
                     size=(image.shape[1], image.shape[0]), opacity=128,
                     area=((0, 0), (image.shape[1], image.shape[0])))
    return img
def use_heatmap(image, box_centers):
    import heatmap
    hm = heatmap.Heatmap()
    box_centers = [(i, image.shape[0] - j) for i, j in box_centers]
    img = hm.heatmap(box_centers, dotsize=10,
                     size=(image.shape[1], image.shape[0]), opacity=40,
                     area=((0, 0), (image.shape[1], image.shape[0])))
    img.save('hm.png')
def generate_location_vis(self):
    LOCATION_PROBE = 'edu.mit.media.funf.probe.builtin.LocationProbe'
    LocTuple = namedtuple('LocTuple', ['lat', 'long', 'time'])
    location_data = []
    self.movement_location_data = []
    self.locationScans = 0
    self.last_loc_long = 0
    self.last_loc_lat = 0
    # Location Data
    c = self.conn.cursor()
    t = (LOCATION_PROBE, )
    c.execute('select * from data where probe = ? ORDER BY timestamp', t)
    i = 0
    last_date = 0
    for row in c:
        i += 1
        data = row[4]
        data = data.replace("true", "True")
        data = data.replace("false", "False")
        data_dict = ast.literal_eval(data.rstrip())
        if int(data_dict["timestamp"]) - last_date >= 60:
            location_data.append(
                [data_dict['mLongitude'], data_dict['mLatitude']])
            last_date = int(data_dict["timestamp"])
            self.movement_location_data.append(
                LocTuple(data_dict['mLongitude'], data_dict['mLatitude'],
                         datetime.fromtimestamp(float(data_dict["timestamp"]))))
            print self.movement_location_data[-1].time
    self.locationScans = i
    if len(location_data) > 0:
        self.last_loc_lat = location_data[-1][1]
        self.last_loc_long = location_data[-1][0]
        hm = heatmap.Heatmap()
        # Check if there is data
        if i > 0:
            hm.heatmap(location_data, "classic.png", dotsize=50)
        else:
            hm.heatmap([[0, 0]], "classic.png", dotsize=50)
        hm.saveKML("most_visited.kml")
        hm.saveKMZ("most_visited")
        # Try movement kml
        kml_movement.main(self.movement_location_data)
    else:
        self.last_loc_lat = "42.36125551"
        self.last_loc_long = "-71.08763113"
    print "Done with location"
def djikstra(customers, motw):
    heatmaps = []
    for custom in customers:
        heatmap = hm.Heatmap(custom, motw, customers)
        heatmaps.append(heatmap)
        explore(heatmap)
    #for i in range(len(heatmap.map)):
    #    for el in heatmap.map[i]:
    #        print (el.prize_up_to,)
    #    print()
    return heatmaps
def use_heatmap(image, box_centers):
    import heatmap
    hm = heatmap.Heatmap()
    box_centers = [(i, image.shape[0] - j) for i, j in box_centers]
    #print hm.schemes()
    img = hm.heatmap(box_centers, dotsize=40,
                     size=(image.shape[1], image.shape[0]), opacity=150,
                     scheme='classic',
                     area=((0, 0), (image.shape[1], image.shape[0])))
    return img
def t_heatmap():
    import heatmap
    import random
    pts = []
    for x in range(400):
        pts.append((random.random(), random.random()))
    hm = heatmap.Heatmap()
    img = hm.heatmap(pts)
    img.save("classic.png")
    return
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--image', default='data/gtasa-blank-1.0.jpg',
                        help="path to Ian Albert's gtasa-blank-1.0.jpg map")
    parser.add_argument('-o', '--imageout', default='data/',
                        help="directory to write output images to")
    parser.add_argument('-l', '--hmlibpath',
                        default='C:/Anaconda3/Lib/site-packages/heatmap/cHeatmap-x64.dll',
                        help="path to the heatmap cHeatmap DLL")
    args = parser.parse_args()

    # Initialise PIL stuff
    font = ImageFont.truetype("arial.ttf", 32)
    mapimg = Image.open(args.image)
    draw = ImageDraw.Draw(mapimg)
    hm = heatmap.Heatmap(libpath=args.hmlibpath)

    # Generate paths
    map_path = os.path.join(args.imageout, "gtasa-blank-1.0-ss-map.jpg")
    hm_loot = os.path.join(args.imageout, "gtasa-blank-1.0-ss-loot.jpg")
    hm_vehicle = os.path.join(args.imageout, "gtasa-blank-1.0-ss-vehicles.jpg")
    hm_object = os.path.join(args.imageout, "gtasa-blank-1.0-ss-objects.jpg")

    # Load SS data
    regions = Regions("data/regions.json")
    loot = LootSpawns("../gamemodes/sss/world/zones/")
    vehicles = VehicleSpawns("../scriptfiles/vspawn/")
    objs = ObjectSet("../scriptfiles/Maps/")

    # Generate the main map with region lines, loot spawns and vehicles
    draw_loot(mapimg, draw, loot)
    draw_vehicles(mapimg, draw, vehicles)
    draw_regions(mapimg, draw, regions, font)
    mapimg.save(map_path)

    # Generate heatmaps for loot and vehicles on separate images
    generate_loot_heatmap(copy.copy(mapimg), draw, hm, loot, hm_loot)
    generate_vehicle_heatmap(copy.copy(mapimg), draw, hm, vehicles, hm_vehicle)
    generate_obj_heatmap(copy.copy(mapimg), draw, hm, objs, hm_object)
def make_heatmap(l_dict):
    pts = []
    c = 0
    for tname, loc_list in l_dict.iteritems():
        for i in range(len(loc_list)):
            loc = loc_list[i]
            pts.append([loc.lat, loc.long])
        c += 1
        if c == 10:
            break
        #break;
    hm = heatmap.Heatmap()
    hm.heatmap(pts, "classic.png")
    hm.saveKML("10_ppl_5_days.kml")
def run_osea(taxonomy_file, feature_table, metadata_file, obj_col, set_level,
             test_method_name='t_test', per_num=1000):
    """Generate an OSEA instance and compute its enrichment scores and p-values.

    Args:
        obj_col: object column, which can be seen as a 0-1 label dividing the
            samples into two groups.
        set_level: a string such as "Phylum", "Genus", and so on.
        per_num: permutation number.

    Returns:
        final_result: a dict object like {set: {ES: ..., pvalue: ...}}
    """
    heatmap_instance = heatmap.Heatmap(metadata_file, feature_table)
    heatmap_instance.map()
    part1, part2 = stats_test.choose_two_class(heatmap_instance.df, obj_col)
    part1 = part1[heatmap_instance.df_primary_col]
    part2 = part2[heatmap_instance.df_primary_col]
    rank_list = obtain_rank_list(part1, part2, test_method_name)
    osea_real = OSEA.OSEA(rank_list, Taxon_file=taxonomy_file, set_level=set_level)

    ### permute many times to generate the null distribution
    enrichment_scores = []
    tmp_df = heatmap_instance.df[heatmap_instance.df_primary_col]
    # TODO: we can use a pool here to accelerate the program. (done)
    # multiprocess by using Pool()
    with Pool() as p:
        rank_lists = p.starmap(permutation_to_obtain_ranklist,
                               [[tmp_df, i, test_method_name] for i in range(per_num)])
    for rank_list in rank_lists:
        es = osea_real.get_ES(rank_list)
        enrichment_scores.append(es)
    """ single process code
    for i in range(per_num):
        rank_list = OSEA.permutation_to_obtain_ranklist(tmp_df, test_method_name)
        es = osea_real.get_ES(rank_list)
        enrichment_scores.append(es)
    """

    ### get the null distribution for every set
    set_enrichment_score = {}
    for key in enrichment_scores[0]:
        set_enrichment_score[key] = []
        for i in range(per_num):
            set_enrichment_score[key].append(enrichment_scores[i][key])

    ### compute the p-value
    final_result = {}
    for ele in set_enrichment_score:
        distribution = OSEA.generate_distribution(set_enrichment_score[ele])
        pvalue = OSEA.p_value(osea_real.es[ele], distribution)
        final_result[ele] = {'ES': osea_real.es[ele], 'pvalue': pvalue}
    return final_result
def heat_map():
    content = request.get_json(force=True)  # content is filename string
    metadata = content['metadata']
    feature_table = content['feature_table']
    features = [content['feature0'], content['feature1'], content['feature2']]
    ID_num = int(content['node_num'])
    #prevalence = content['prevalence']
    #abundance = content['abundance']
    #variance = content['variance']
    try:
        f = open('MVP/pickles/' + metadata.split('/')[-1] + '_heatmap.pickle', 'rb')
        heatmap_instance = pickle.load(f)
        print('read heatmap from pickle')
        f.close()
    except:
        heatmap_instance = heatmap.Heatmap(metadata, feature_table)
        heatmap_instance.map()
        with open('MVP/pickles/' + metadata.split('/')[-1] + '_heatmap.pickle', 'wb') as g:
            pickle.dump(heatmap_instance, g)
        print('write heatmap to pickle')
    #heatmap_instance.filter(prevalence_threshold=prevalence, abundance_num=abundance, variance_num=variance)
    heatmap_instance.map()
    heatmap_instance.sort_by_features(features[0], features[1], features[2])
    try:
        f = open('MVP/pickles/' + metadata.split('/')[-1] + '_mvp_tree.pickle', 'rb')
        mvp_tree = pickle.load(f)
        print('read mvp_tree from pickle')
        mvp_tree.get_subtree(ID_num)
        cols = [ele.name for ele in mvp_tree.subtree.get_terminals()]
        f.close()
    except:
        string_ = 'there are no pickles to read. please try the plot_tree button'
        result = {0: string_}
        return jsonify(result)
    heatmap_instance.obtain_numerical_matrix(cols)
    show_label = content['show_label']
    if show_label == 'show':  # show metadata beside the heatmap or not
        show_label = True
    else:
        show_label = False
    heatmap_div = heatmap_instance.plotly_div(show_label)
    result = {0: heatmap_div}
    return jsonify(result)
def generate_obj_heatmap(im, draw, objs):
    points = []
    for l in objs:
        points.append([int(l.x + 3000), int(l.y + 3000)])
    hm = heatmap.Heatmap(
        libpath="C:\\Python34\\Lib\\site-packages\\heatmap\\cHeatmap-x86.dll")
    hmimg = hm.heatmap(points, dotsize=150, size=(6000, 6000),
                       scheme='classic', area=((0, 0), (6000, 6000)))
    im.paste(hmimg, mask=hmimg)
    im.save("object-heatmap.jpg")
def heatmapper(UltraList):
    save_map_memory(UltraList)
    UltraList = get_map_mem()
    hm = heatmap.Heatmap()
    img = hm.heatmap(UltraList, area=((0, 0), (1280, 720)),
                     size=(1280, 720), dotsize=50)
    # print("heatmap created")
    overlay = img
    # background = Image.new("RGBA", (1280, 720), "black")
    overlay = overlay.convert("RGBA")
    # Image.alpha_composite(background, overlay).save("Current_Map" + ".png", "PNG")
    overlay.save("Current_Map" + ".png", "PNG")
    overlay.save("Current_Map2" + ".png", "PNG")
def generate_vehicle_heatmap(im, draw, vehicles):
    points = []
    for l in vehicles:
        points.append([int(l.x + 3000), int(l.y + 3000)])
    hm = heatmap.Heatmap(
        libpath="C:\\Python34\\Lib\\site-packages\\heatmap\\cHeatmap-x86.dll")
    hmimg = hm.heatmap(points, dotsize=300, size=(6000, 6000),
                       scheme='classic', area=((0, 0), (6000, 6000)))
    im.paste(hmimg, mask=hmimg)
    im.save("gtasa-blank-1.0-ss-map-heat-vehicle.jpg")
def heatmapper(UltraList, num):
    print("Processing points...")
    print(UltraList)
    hm = heatmap.Heatmap()
    print("heatmapper loaded")
    img = hm.heatmap(UltraList)
    print("heatmap created")
    overlay = img
    background = Image.open('bk.jpg')
    background = background.convert("RGBA")
    overlay = overlay.convert("RGBA")
    Image.blend(background, overlay, 0.4).save("result" + str(num) + ".png", "PNG")
    print("overlay completed")
    print(UltraList)
def get_heatmap(image_file, image_fixations):
    X = cv2.imread(image_file)
    orig_shape = np.copy(X.shape)
    npad = ((0, 513 - X.shape[0]), (0, 513 - X.shape[1]), (0, 0))
    #print(X.shape)
    img = np.pad(X, pad_width=npad, mode='constant', constant_values=0)
    diag = math.sqrt(img.shape[0]**2 + img.shape[1]**2) * 0.02
    fin_out = [[x[1], x[2]] for x in image_fixations]
    values = np.asarray(fin_out)
    neighbors = np.zeros((values.shape[0]))
    selPoints = np.empty((1, 2))
    for i in range(values.shape[0]):
        diff = np.sqrt(np.sum(np.square(values - values[i]), axis=1))
        neighbors[i] = np.sum(diff < diag)
    for i in range(values.shape[0]):
        if neighbors[i] > 0.05 * values.shape[0]:
            selPoints = np.append(selPoints, values[i:i + 1, :], axis=0)
    selPoints = selPoints[1:, :]
    selPoints[:, [0, 1]] = selPoints[:, [1, 0]]
    selPoints = selPoints.astype(int)
    hm = heatmap.Heatmap()
    ar = ((0, 0), (img.shape[1], img.shape[0]))
    si = (img.shape[1], img.shape[0])
    ds = int(75 * ((img.shape[0] + img.shape[1]) / 875.0))
    selPoints[:, 1] = img.shape[0] - selPoints[:, 1]
    heatMap = hm.heatmap(selPoints, area=ar, size=si, dotsize=ds, opacity=500)
    # To overlay on image
    heatMap = np.asarray(heatMap)
    indMap = heatMap[:, :, 3] == 0
    heatMap = heatMap[:, :, 0:3]
    sup = cv2.addWeighted(img, 0.4, heatMap, 0.6, 0)
    sup[indMap] = img[indMap]
    sup = sup[:orig_shape[0], :orig_shape[1], ::-1]
    fig = plt.figure(frameon=False)
    ax = plt.Axes(fig, [0., 0., 1., 1.])
    ax.set_axis_off()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    fig.add_axes(ax)
    plt.axis('off')
    plt.imshow(sup)
    plt.savefig('temp_heatmap.png', bbox_inches='tight', pad_inches=0)
def main():
    shift = 20
    splats_file = sys.argv[1]
    map_prev_file = sys.argv[2]
    with open(splats_file) as f:
        splats = json.load(f)
    preview = Image.open(map_prev_file)
    size = preview.size
    pts = [(p['x'] + shift, (size[1] - (p['y'] + shift))) for p in splats]
    hm = heatmap.Heatmap()
    img = hm.heatmap(pts, dotsize=200, size=size, scheme='classic',
                     area=((0, 0), size))
    img.save('classic.png')
def update_after_move(self, complete=True):
    if self.get_control(ct.K_HEATMAP).get():
        heatmap = hm.Heatmap(self.game, self.bots[self.game.turn])
    else:
        heatmap = None
    self.board.draw(heatmap, complete)
    self.window.refresh()
    # reset progress
    self.update_progress()
    self.update_turn_indicators()
    self.update_history()
    if self.get_control(ct.K_SHOW_EVALUATION).get():
        self.update_evals()
def test():
    x = ReplayManager(argv[1])
    x.StartPlayback()
    treemodels = []
    for key, value in x.StringSets[3].items():
        if '/props/trees/' in value:
            treemodels.append(int(key))
    # skip all initial entity states
    x.NextFrame()
    pts = []
    while x.NextFrame():
        for id in x.addedentities:
            if x.EntityPool[id].typedesc[0] == 'Prop_Dynamic' and x.EntityPool[id]['m_hModel'] in treemodels:
                pts.append((x.EntityPool[id]['m_v3Position'][0] / 32,
                            x.EntityPool[id]['m_v3Position'][1] / 32))
    hm = heatmap.Heatmap()
    pts.extend([(0, 0), (511, 511)])
    hm.heatmap(pts, "heatmap.png", size=(512, 512), dotsize=15)
def test():
    x = ReplayManager(argv[1])
    x.StartPlayback()
    madmanid = None
    pts = []
    while not madmanid:
        x.NextFrame()
        for id in [id for id in x.addedentities
                   if x.EntityPool[id].typedesc[0] == 'Hero']:
            if 'scar' in x.StringSets[3][str(x.EntityMap[x.EntityPool[id].EntityIndex])]:
                madmanid = id
    while x.NextFrame():
        # m_yStatus == 0 -- dead
        if x.EntityPool[madmanid]['m_yStatus'] != 0:
            pts.append((x.EntityPool[madmanid]['m_v3Position.xy'][0] / 32,
                        x.EntityPool[madmanid]['m_v3Position.xy'][1] / 32))
    hm = heatmap.Heatmap()
    pts.extend([(0, 0), (511, 511)])
    hm.heatmap(pts, "madman.png", size=(512, 512), dotsize=15)
def plot_stats_test():
    content = request.get_json(force=True)
    label_col = content['label_col']
    metadata = content['metadata']
    feature_table = content['feature_table']
    test_method = content['stats_method']
    taxonomy = content['taxonomy']
    #features = [content['feature0'], content['feature1'], content['feature2']]
    heatmap_instance = heatmap.Heatmap(metadata, feature_table)
    heatmap_instance.map()
    part1, part2 = stats_test.choose_two_class(heatmap_instance.df, label_col)
    part1 = part1[heatmap_instance.df_primary_col]
    part2 = part2[heatmap_instance.df_primary_col]
    test_result = stats_test.perform_test(part1, part2, test_method)
    try:
        with open('MVP/pickles/' + taxonomy.split('/')[-1] +
                  '_annotation.pickle', 'rb') as f:
            ann = pickle.load(f)
        colors = ann.colors
        color_index = ann.mapped_phylum_colors
        print('read annotation pickles by stats test')
    except:
        colors = None
        color_index = None
    try:
        with open('MVP/pickles/' + metadata.split('/')[-1] +
                  '_mvp_tree.pickle', 'rb') as f:
            mvp_tree = pickle.load(f)
        cols = [ele.name for ele in mvp_tree.feature_tree.get_terminals()]
    except:
        cols = None
    #print(colors)
    div_str = stats_test.plot_result_dict(test_result, cols, taxonomy,
                                          colors, color_index)
    result = {0: div_str}
    return jsonify(result)
def plot_dim_reduce():
    content = request.get_json(force=True)
    metadata = content['metadata']
    feature_table = content['feature_table']
    obj_col = content['obj_col']
    # new buttons
    n_component = int(content['n_component'])
    method = content['method']
    flag_3d = content['flag_3d']
    #print(type(n_component))
    #print(n_component)
    heatmap_instance = heatmap.Heatmap(metadata, feature_table)
    heatmap_instance.map()
    labels = heatmap_instance.df[obj_col]
    matrix = heatmap_instance.df[heatmap_instance.df_primary_col]
    reduced = dimension_reduce.reduce_dimension(matrix, n_component, method)
    for ele in reduced:
        print(len(ele))
        break
    div = dimension_reduce.dimension_reduce_visualize(reduced, labels, flag_3d)
    result = {0: div}
    return jsonify(result)
import heatmap
import sys

"""
download heatmap package from http://jjguy.com/heatmap/
"""

f = open('sample1.coord').read().split('\n')
pts = []
for line in f:
    coords = line.strip().split('\t')
    if len(coords) < 2:
        continue
    pts.append((float(coords[1]), float(coords[0])))
hm = heatmap.Heatmap()
img = hm.heatmap(pts, dotsize=3)
hm.saveKML("heatmapjjguy.kml")
def setUp(self):
    self.heatmap = heatmap.Heatmap()
def plot_tree():
    content = request.get_json(force=True)
    tree = content['tree_file']
    #tree_type = content['tree_type']
    #file_type = content['file_type']
    ID_num = int(content['node_num'])
    feature_table = content['feature_table']
    taxo_file = content['taxonomy_file']
    metadata = content['metadata']
    #tree = circular_tree.read_tree(tree_file, file_type)
    try:
        f = open('MVP/pickles/' + metadata.split('/')[-1] + '_mvp_tree.pickle', 'rb')
        mvp_tree = pickle.load(f)
        print('read mvp_tree from pickle')
        f.close()
    except:
        mvp_tree = corr_tree_new.MvpTree(feature_table, tree, metadata, taxo_file, ID_num)
        file_paras = {
            'feature_table': feature_table,
            'metadata': metadata,
            'taxonomy': taxo_file,
            'tree': tree
        }
        with open('MVP/pickles/files.pickle', 'wb') as f:
            pickle.dump(file_paras, f)
        with open('MVP/pickles/' + metadata.split('/')[-1] + '_mvp_tree.pickle', 'wb') as g:
            pickle.dump(mvp_tree, g)
        print('write mvp_tree to pickle')
    mvp_tree.get_subtree(ID_num)
    cols = [ele.name for ele in mvp_tree.subtree.get_terminals()]

    # plot_anno
    ann = annotation.Annotation(cols, feature_table, taxo_file)
    ann_div = ann.plot_annotation()
    with open('MVP/pickles/' + taxo_file.split('/')[-1] + '_annotation.pickle', 'wb') as f:
        pickle.dump(ann, f)
    mvp_tree.get_colors(ann.colors, ann.mapped_phylum_colors)
    tree_div = mvp_tree.plot_tree()

    # plot_heatmap
    features = [content['feature0'], content['feature1'], content['feature2']]
    try:
        f = open('MVP/pickles/' + metadata.split('/')[-1] + '_heatmap.pickle', 'rb')
        heatmap_instance = pickle.load(f)
        print('read heatmap from pickle')
        f.close()
    except:
        heatmap_instance = heatmap.Heatmap(metadata, feature_table)
        heatmap_instance.map()
        with open('MVP/pickles/' + metadata.split('/')[-1] + '_heatmap.pickle', 'wb') as g:
            pickle.dump(heatmap_instance, g)
        print('write heatmap to pickle')
    heatmap_instance.sort_by_features(features[0], features[1], features[2])
    heatmap_instance.obtain_numerical_matrix(cols)
    show_label = content['show_label']
    if show_label == 'show':  # show metadata beside the heatmap or not
        show_label = True
    else:
        show_label = False
    heatmap_div = heatmap_instance.plotly_div(show_label)

    # total
    result = {0: tree_div, 1: ann_div, 2: heatmap_div}
    return jsonify(result)
def get_heatmap(map_info, dot_size=150, opacity=255):
    hm = heatmap.Heatmap()
    # schemes: classic, fire, omg, pbj, pgaitch
    img = hm.heatmap(points=_adapt(map_info),
                     dotsize=max(int(dot_size), 1),
                     opacity=opacity,
                     size=map_info.size,
                     scheme='fire',
                     area=((0, 0), map_info.size))
    return img