def take_snapshot(self, file_html=None, width="1400px", height="580px"):
    """Build a trading snapshot as a tabbed pyecharts page.

    :param file_html: str, html file name to save the snapshot to;
        when falsy the assembled Tab object is returned instead
    :param width: str, chart width (e.g. "1400px")
    :param height: str, chart height
    :return: pyecharts Tab when file_html is falsy, otherwise None
    """
    title = "{}@{}".format(self.symbol, self.end_dt.strftime("%Y-%m-%d %H:%M"))
    tab = Tab(page_title=title)

    # one chart tab per frequency
    for freq in self.freqs:
        tab.add(ka_to_echarts(self.kas[freq], width, height), freq)

    # signal table: entries whose key contains an underscore
    signal_rows = [[key, val] for key, val in self.s.items() if "_" in key]
    signal_table = Table()
    signal_table.add(["名称", "数据"], signal_rows)
    signal_table.set_global_opts(
        title_opts=ComponentTitleOpts(title="缠中说禅信号表", subtitle=""))
    tab.add(signal_table, "信号表")

    # factor table: entries whose key has no underscore
    factor_rows = [[key, val] for key, val in self.s.items() if "_" not in key]
    factor_table = Table()
    factor_table.add(["名称", "数据"], factor_rows)
    factor_table.set_global_opts(
        title_opts=ComponentTitleOpts(title="缠中说禅因子表", subtitle=""))
    tab.add(factor_table, "因子表")

    if file_html:
        tab.render(file_html)
    else:
        return tab
def multiple():
    """Build a three-tab map (confirmed / deaths / recovered) and render it.

    Each tab label matches the metric name passed to geo.set_map; the
    second argument is the metric-specific visual-map ceiling.
    :return: True once the html file is written
    """
    tab = Tab()
    for metric, ceiling in (("confirmed", 200000),
                            ("deaths", 30000),
                            ("recovered", 90000)):
        tab.add(geo.set_map(metric, ceiling), metric)
    tab.render(output_path + "tab_base.html")
    return True
def take_snapshot(self, file_html=None, width="1400px", height="580px"):
    """Build a trading snapshot as a tabbed pyecharts page.

    :param file_html: str, html file name to save the snapshot to;
        when falsy the assembled Tab object is returned instead
    :param width: str, chart width (e.g. "1400px")
    :param height: str, chart height
    :return: pyecharts Tab when file_html is falsy, otherwise None
    """
    tab = Tab(page_title="{}@{}".format(
        self.symbol, self.end_dt.strftime("%Y-%m-%d %H:%M")))

    # one chart tab per frequency
    for freq in self.freqs:
        tab.add(self.kas[freq].to_echarts(width, height), freq)

    # signal table: underscore keys whose value is a string and not a
    # "boring" placeholder value
    boring = ("Other~其他", "向下", 'Y~是', 'N~否', '向上')
    signal_rows = []
    for key, val in self.s.items():
        if "_" in key and isinstance(val, str) and val not in boring:
            signal_rows.append([key, val])
    signal_table = Table()
    signal_table.add(["名称", "数据"], signal_rows)
    signal_table.set_global_opts(
        title_opts=ComponentTitleOpts(title="缠中说禅信号表", subtitle=""))
    tab.add(signal_table, "信号表")

    # factor table: non-underscore keys plus a 同花顺 F10 link row
    ths_ = [["同花顺F10", "http://basic.10jqka.com.cn/{}".format(self.symbol[:6])]]
    factor_rows = [[key, val] for key, val in self.s.items()
                   if "_" not in key and val != "Other~其他"]
    factor_table = Table()
    factor_table.add(["名称", "数据"], factor_rows + ths_)
    factor_table.set_global_opts(
        title_opts=ComponentTitleOpts(title="缠中说禅因子表", subtitle=""))
    tab.add(factor_table, "因子表")

    if file_html:
        tab.render(file_html)
    else:
        return tab
def test_tab_base(fake_writer):
    """A Tab holding a bar and a line renders both tab labels into the output."""
    tab = Tab()
    tab.add(_create_bar(), "bar-example")
    tab.add(_create_line(), "line-example")
    tab.render()
    _, content = fake_writer.call_args[0]
    for label in ("bar-example", "line-example"):
        assert_in(label, content)
def Tab_WordCloud():
    """Collect the four review word clouds into one tabbed view.

    :return: pyecharts Tab with douban/maoyan high/low-score word clouds
    """
    tab = Tab()
    clouds = (
        (WordCloud_douban_high, "豆瓣高分词云"),
        (WordCloud_douban_low, "豆瓣低分词云"),
        (WordCloud_maoyan_high, "猫眼高分词云"),
        (WordCloud_maoyan_low, "猫眼低分词云"),
    )
    for build_chart, label in clouds:
        tab.add(build_chart(), label)
    return tab
def class_sum_by_subject() -> Tab:
    """Build a Tab with one chart per subject.

    Pairs each chart produced by add_bar() with the matching label in
    subjectName_list (zip stops at the shorter sequence).

    NOTE: the original loop contained leftover debug print() calls for
    each chart and name; they have been removed.
    :return: the assembled Tab
    """
    tab_ = Tab()
    for content, name in zip(add_bar(), subjectName_list):
        tab_.add(content, name)
    return tab_
def tab_charts(chart_list, name_list):
    """Assemble charts and their tab labels into a single Tab.

    :param chart_list: sequence of pyecharts chart objects
    :param name_list: sequence of tab labels, one per chart
    :return: the assembled Tab, or None (after printing a warning)
        when the two sequences have different lengths
    """
    # guard clause replaces the original if/else pyramid; same
    # user-facing message, same implicit-None contract for callers
    if len(chart_list) != len(name_list):
        print('请确保图表代码与图表名称一一对应')
        return None
    tab = Tab()
    # zip replaces the original index loop over range(len(...))
    for chart, name in zip(chart_list, name_list):
        tab.add(chart, name)
    return tab
def test_tab_render_notebook():
    """render_notebook() output should embed the table's contents."""
    tab = Tab()
    for chart, label in (
        (_create_line(), "line-example"),
        (_create_bar(), "bar-example"),
        (_create_table(), "table-example"),
    ):
        tab.add(chart, label)
    html = tab.render_notebook().__html__()
    assert_in("City name", html)
def get_figure_area_all():
    """Build the four COVID-19 area-trend tabs (confirmed / active / healed / dead).

    :return: pyecharts Tab titled 'Line-Area'
    """
    tab_area = Tab(page_title='Line-Area')
    series = (
        ('累计确诊', 'confirm', 'COVID-19各地区病例趋势图(cn当日累计确诊病例)'),
        ('现有确诊', 'now_confirm', 'COVID-19各地区病例趋势图(cn当日现有确诊病例)'),
        ('累计治愈', 'heal', 'COVID-19各地区病例趋势图(cn当日累计治愈病例)'),
        ('累计死亡', 'dead', 'COVID-19各地区病例趋势图(cn当日累计死亡病例)'),
    )
    for label, column, tab_name in series:
        tab_area.add(area_line(label_name=label, column_name=column), tab_name)
    return tab_area
def tab_user_analysis_base():
    """Respond to requests for chart parameters.

    Returns
    -------
    tab : TYPE-echarts parameters
        The assembled Tab of user-analysis charts.
    """
    tab = Tab()
    charts = (
        (hist_user_industry_base, "User Industry"),
        (hist_user_company_size_base, "User Company"),
        (hist_user_product_use_time_base, "Product Usage Time"),
        (wordcloud_user_job_base, "User Occupation"),
    )
    for build_chart, label in charts:
        tab.add(build_chart(), label)
    return tab
def create_tab():
    """Assemble the demo charts into one Tab.

    Fix: the word-cloud tab label was misspelled 'WorldCloud' in the
    original; corrected to 'WordCloud'.
    :return: the assembled Tab
    """
    tab = Tab()
    tab.add(_create_line(), 'Line')
    tab.add(_create_table(), 'Table')
    tab.add(word_cloud(), 'WordCloud')
    tab.add(_create_timeline(), 'Timeline')
    tab.add(_create_tree(), 'Tree')
    tab.add(_create_bar(), 'Bar')
    return tab
def tab_fun():
    """Render a two-tab demo (line chart + bar chart) to a local html file.

    Uses Faker demo data; each series gets a random item color.
    :return: None (writes D:/temp/pyecharts_tab.html)
    """
    # line chart with two random series (builders return self, so the
    # original call chain is split into plain statements)
    line = Line()
    line.add_xaxis(Faker.choose())
    line.add_yaxis(
        series_name="xx1",
        y_axis=Faker.values(1, 100),
        itemstyle_opts=opts.ItemStyleOpts(color=Faker.rand_color()))
    line.add_yaxis(
        series_name="xx2",
        y_axis=Faker.values(1, 100),
        itemstyle_opts=opts.ItemStyleOpts(color=Faker.rand_color()))
    line.set_global_opts(
        title_opts=opts.TitleOpts(title="主标题", subtitle="副标题", pos_left="10%"),
        legend_opts=opts.LegendOpts(pos_left="40%"),
    )

    # bar chart with two random series
    bar = Bar()
    bar.add_xaxis(Faker.choose())
    bar.add_yaxis(
        series_name="柱1",
        y_axis=Faker.values(1, 100),
        itemstyle_opts=opts.ItemStyleOpts(color=Faker.rand_color()))
    bar.add_yaxis(
        series_name="柱2",
        y_axis=Faker.values(1, 100),
        itemstyle_opts=opts.ItemStyleOpts(color=Faker.rand_color()))
    bar.set_global_opts(
        title_opts=opts.TitleOpts(title="主标题", subtitle="副标题", pos_left="10%"),
        legend_opts=opts.LegendOpts(pos_left="40%"),
    )

    tab = Tab()
    tab.add(chart=line, tab_name="选项卡一")
    tab.add(chart=bar, tab_name="选项卡二")
    tab.render(path="D:/temp/pyecharts_tab.html")
def Gather():
    """Render the satisfaction dashboard: one tab per metric plus an NPS tab.

    Reads the cleaned data via CleanData() and writes the result to
    '满意度得分展示Demo.html'.
    :return: None
    """
    tab = Tab(page_title='碧桂园集中式商业消费者满意度研究')
    CenterName, Data_Dict = CleanData()
    # one satisfaction chart per metric key, labelled with that key
    for tab_name, scores in Data_Dict.items():
        tab.add(Satisfaction(CenterName, tab_name, scores), tab_name)
    tab.add(NPS(CenterName), 'NPS')
    tab.render('满意度得分展示Demo.html')
def local_expense():
    """Flask view: render the fiscal education-spending map with its summary.

    Fix: the original bound the same Tab to two names (`c = tab = Tab()`);
    the redundant alias is removed.
    :return: rendered 'select.html' template
    """
    tab = Tab()
    tab.add(finacial_education_map(), "地方财政教育支出")
    result = Markup(tab.render_embed())
    # keyword name 'conculsion' is referenced by the template, keep as-is
    conculsion = ("小结:从图1,我们可以看出,这10年间全国各地的财政教育支出都是随年增长的,"
                  "从地图可以看出广东、四川、山东、河南是全国各省中颜色从浅变深变化最明显的4个城,"
                  "他们的财政教育支出增加的速度很快。")
    return render_template('select.html', result=result, conculsion=conculsion)
def effectScatter():
    """Flask view: render the 四川特殊教育 effect-scatter chart.

    Fix: the original bound the same Tab to two names (`c = tab = Tab()`);
    the redundant alias is removed.
    :return: rendered 'effectScatter.html' template
    """
    tab = Tab()
    tab.add(effectscatter_symbol(), "四川特殊教育基本情况")
    result = Markup(tab.render_embed())
    return render_template('effectScatter.html', result=result)
def scatter():
    """Flask view: render the four-province comparison scatter chart.

    Fix: the original bound the same Tab to two names (`c = tab = Tab()`);
    the redundant alias is removed.
    :return: rendered 'scatter.html' template
    """
    tab = Tab()
    tab.add(four_compare(), "四个不同省份对比")
    result = Markup(tab.render_embed())
    return render_template('scatter.html', result=result)
def detail():
    """Flask view: render the four special-education detail charts in tabs.

    Fix: the original bound the same Tab to two names (`c = tab = Tab()`);
    the redundant alias is removed.
    :return: rendered 'select.html' template
    """
    tab = Tab()
    tab.add(schools_map(), "各省特殊教育学校数")
    tab.add(teachers_map(), "特殊教育专任教师数")
    tab.add(recruit_students_map(), "各省特殊教育招生数")
    # NOTE(review): `all` here must be a project chart function that
    # shadows the builtin (builtin all() takes an iterable) — confirm
    # and consider renaming it at its definition site.
    tab.add(all(), "教育支出与特殊教育在校学生详情")
    result = Markup(tab.render_embed())
    return render_template('select.html', result=result)
def depLevels():
    """Render level-dependency graphs for two npm packages into one html file.

    A timestamp is embedded in each graph title so successive runs are
    distinguishable.
    :return: None (writes ./templates/npmLevelDependencies.html)
    """
    # fetch the processed dependency relations
    dep_list = getDepList()
    stamp = str(datetime.datetime.now())
    tab = Tab()
    for pkg in ("request", "underscore"):
        graph = levelRender(pkg + ' Levels Dependencies' + stamp, pkg, dep_list)
        tab.add(graph, pkg)
    tab.render("./templates/npmLevelDependencies.html")
def test_tab_jshost_custom():
    """Tab picks up a custom ONLINE_HOST as its js_host.

    Fix: the original restored CurrentConfig.ONLINE_HOST only after the
    assertion, so a failing assert left the global mutated and could
    poison later tests; restoration now happens in a finally block.
    """
    from pyecharts.globals import CurrentConfig

    default_host = CurrentConfig.ONLINE_HOST
    custom_host = "http://localhost:8888/assets/"
    CurrentConfig.ONLINE_HOST = custom_host
    try:
        bar = _create_bar()
        line = _create_line()
        tab = Tab().add(bar, "bar").add(line, "line")
        assert_equal(tab.js_host, custom_host)
    finally:
        CurrentConfig.ONLINE_HOST = default_host
def get_figure_map_all():
    """Build the two COVID-19 province-map tabs (active and cumulative cases).

    :return: pyecharts Tab titled 'Map-Area'
    """
    tab_map = Tab(page_title='Map-Area')
    maps = (
        ('现有确诊', 'now_confirm', 'COVID-19疫情动态图(cn当日现有确诊病例数)'),
        ('累计确诊', 'confirm', 'COVID-19疫情动态图(cn当日累计确诊病例数)'),
    )
    for label, column, tab_name in maps:
        tab_map.add(province_map(label_name=label, column_name=column), tab_name)
    return tab_map
def take_snapshot(self, file_html, width="950px", height="480px"):
    """Render this trade's snapshot (one chart per freq plus a signal table).

    :param file_html: str, html file the snapshot is written to
    :param width: str, chart width
    :param height: str, chart height
    :return: None
    """
    page_title = "{}的交易快照@{}".format(
        self.symbol, self.end_dt.strftime("%Y-%m-%d %H:%M"))
    tab = Tab(page_title=page_title)

    # one chart tab per frequency
    for freq in self.freqs:
        tab.add(ka_to_echarts(self.kas[freq], width, height), freq)

    # append a table of all current signals
    signal_table = Table()
    signal_table.add(["名称", "数据"],
                     [[key, val] for key, val in self.signals.items()])
    signal_table.set_global_opts(
        title_opts=ComponentTitleOpts(title="缠论信号", subtitle=""))
    tab.add(signal_table, "信号表")
    tab.render(file_html)
def makechart(data, debug=False):
    """Build the Shanghai COVID dashboard and render it to `outfile`.

    Three tabs: stacked trend charts, a district map, and a Baidu map.
    :param data: raw data accepted by data_process()
    :param debug: use the dev Baidu API key when True, the web key otherwise
    :return: None
    """
    (day, shsum, shpending, shcured, shdeath,
     shsum_date, new_date, newconfirmed, newpending) = data_process(data)
    title = "上海新型冠状病毒统计"

    # stack the cumulative-trend chart above the daily-new chart
    grid_chart = Grid()
    grid_chart.add(total_trend(day, shdeath, shcured, shsum, shpending),
                   grid_opts=GridOpts(height="57%"))
    grid_chart.add(new_trend(day, newconfirmed, newpending),
                   grid_opts=GridOpts(pos_top="75%", ))

    bk = bkey()
    ak = bk.ak_dev if debug else bk.ak_web

    get_detail()
    with open(shsumary, 'r', encoding='utf-8') as f:
        summary_json = json.loads(f.read())
    sumary = [(k, summary_json[k]) for k in summary_json.keys()]

    tab = Tab(page_title=title)
    tab.add(grid_chart, "趋势")
    tab.add(map_visualmap(sumary), "区县分布")
    tab.add(bmap_base(BAIDU_AK=ak), '详细地图')
    tab.render(outfile)
def plot_geo_summary(self, _geo_data, series_name=None, threshold_plot=None):
    """Render geo clustering views (map / heatmap / table) to self.html_name.

    :param _geo_data: geo distribution keyed by city (a pandas Series —
        assumed from the dict() conversion; confirm against callers)
    :param series_name: optional series label; when falsy, the existing
        self.series_name is kept
    :param threshold_plot: optional plot threshold; when falsy (including 0)
        it defaults to the 80th percentile of _geo_data
    :return: None
    """
    if series_name:
        self.series_name = series_name
    if threshold_plot:
        self.threshold_plot = threshold_plot
    else:
        self.threshold_plot = np.percentile(_geo_data, 80)

    # (city, value) pairs as a 2-column array for the chart builders
    geo_data = np.array(list(dict(_geo_data).items()))
    geo_chart = self.get_geo_base(geo_data)
    map_chart = self.get_map_base(geo_data)
    table_chart = self.get_table_base(geo_data)

    tab = Tab(page_title=self.country, )  # tabbed multi-chart page
    tab.add(map_chart, "VisualMap")
    tab.add(geo_chart, "HeatMap")
    tab.add(table_chart, "Data")
    tab.render(self.html_name)
def visualize_layer(model, image, layer_name, layer_max_image=32, jupyter=True, path='visualize_layer.html'):
    """network layer visualize.

    Args:
        model: a tf.keras model or keras model.
        image: a image array with shape (1, height, width, channel), height and width are necessary.
        layer_name: a list of model layers name.
        layer_max_image: every layer max plot images.
        jupyter: if plot in jupyter, default True.
        path: if jupyter is False, result save a html file.
    Returns:
        A pyecharts polt object.
    """
    # Recreate a clean scratch directory for the per-layer feature-map PNGs.
    if tf.io.gfile.exists('feature_map'):
        tf.io.gfile.rmtree('feature_map')
    tf.io.gfile.makedirs('feature_map')
    # Normalize the input to rank 4: (batch, height, width, channel).
    if np.ndim(image)==2:
        image = tf.expand_dims(tf.expand_dims(image, 0), -1)
    elif np.ndim(image)==3:
        image = tf.expand_dims(image, 0)
    elif np.ndim(image)==4:
        pass
    else:
        raise ValueError('image shape is error.')
    # Backend function that returns only the requested layers' outputs,
    # in model.layers order (which is the order temp_name is built in).
    temp_model = tf.keras.backend.function(model.inputs, [i.output for i in model.layers if i.name in layer_name])
    temp_name = [i.name for i in model.layers if i.name in layer_name]
    result = temp_model(image)
    images_per_row = 16
    count = 0
    name_dict = {}  # layer name -> saved PNG path
    tab = Tab()
    for feature, name in zip(result, temp_name):
        # Only rank-4 (batch, h, w, c) activations are visualized.
        if feature.ndim==4:
            if feature.shape[-1]==3:
                # 3-channel activation: show it directly as an RGB image.
                display_grid = feature[0,:,:,:].astype('uint8')
            else:
                # Tile up to layer_max_image channels into a grid,
                # images_per_row (16) tiles per row.
                n_features = feature.shape[-1] if feature.shape[-1]<layer_max_image else layer_max_image
                size = feature.shape[1]  # assumes square feature maps — TODO confirm
                n_cols = int(np.ceil(n_features/images_per_row))
                display_grid = np.ones((size * n_cols, images_per_row * size),dtype=np.uint8)*255
                for col in range(n_cols):
                    for row in range(images_per_row):
                        # NOTE(review): this stops when (col+1)*(row+1)
                        # exceeds n_features, which is not the same as
                        # "tile index >= n_features" — confirm intended.
                        if (col+1)*(row+1)>n_features:
                            break
                        channel_image = feature[0, :, :, col * images_per_row + row]
                        # Standardize, then rescale to roughly 0-255 for display.
                        # NOTE(review): these in-place ops write through a view
                        # into `feature` (mutating the activation array) and
                        # divide by std with no zero guard — confirm intended.
                        channel_image -= channel_image.mean()
                        channel_image /= channel_image.std()
                        channel_image *= 64
                        channel_image += 128
                        channel_image = np.clip(channel_image, 0, 255).astype('uint8')
                        display_grid[col * size : (col + 1) * size, row * size : (row + 1) * size] = channel_image
                # Add a trailing channel axis so the grid saves as an image.
                display_grid = np.expand_dims(display_grid, axis=-1)
            # Save the grid and add it as one tab, labelled with the layer name.
            name_dict[name] = f'./feature_map/{count}.png'
            ImageAug(display_grid).save_image(name_dict[name])
            tab.add(image_base(name_dict[name], name, 'shape='+str(feature.shape)), name)
            count += 1
    # In-notebook HTML when jupyter=True, otherwise render to `path`.
    return tab.render_notebook() if jupyter else tab.render(path)
def rank_page1() -> Tab:
    """Build a Tab of the 2015-2017 top-20 ranking bar charts.

    Fix: the original called bar_base2015/16/17().render_notebook() and
    discarded the result, then built each chart a second time for
    tab.add() — every chart is now constructed exactly once.
    :return: the assembled Tab
    """
    tab = Tab()
    tab.add(bar_base2015(), "2015年前二十排名")
    tab.add(bar_base2016(), "2016年前二十排名")
    tab.add(bar_base2017(), "2017年前二十排名")
    return tab
def rank_page2() -> Tab:
    """Build a Tab of the 2015-2017 bottom-20 ranking bar charts.

    Fix: the original called bar_base2015_1/16_1/17_1().render_notebook()
    and discarded the result, then built each chart a second time for
    tab.add() — every chart is now constructed exactly once.
    :return: the assembled Tab
    """
    tab = Tab()
    tab.add(bar_base2015_1(), "2015年后二十排名")
    tab.add(bar_base2016_1(), "2016年后二十排名")
    tab.add(bar_base2017_1(), "2017年后二十排名")
    return tab
def main():
    """Fetch the block tree, normalize block attributes, and render graphs.

    Steps: GET the tree from the local service, dump it to
    new_all_blocks.json, translate each block's textual attribute into
    the numeric category index used by the graph, write one blockN.json
    per block group, then render every group as a graph tab into
    blockchain.html.

    Fixes: the per-graph json file was opened with a bare open() and
    never closed (now a with-block); the identical `categories` list was
    rebuilt on every loop iteration (now hoisted).
    """
    r = requests.get('http://127.0.0.1:5000/blocktree')
    print(r)
    data = r.json()
    with open("new_all_blocks.json", 'w') as file:
        json.dump(data, file, indent=2)
    with open("new_all_blocks.json", 'r') as file:
        file_data = json.load(file)

    # textual attribute -> numeric category index (see `categories` below);
    # any other value is left untouched, matching the original elif chain
    attribute_codes = {
        "head": 0,
        "block": 1,
        "NORMAL": 2,
        "JUSTIFIED": 3,
        "FINALIZED": 4,
    }
    num = 1
    for block in file_data.values():
        for info in block.values():
            data = {}
            for i in info:
                attr = info[i]["attribute"]
                if attr in attribute_codes:
                    info[i]["attribute"] = attribute_codes[attr]
                # fixed layout origin for every node
                info[i].update({'x': 100, 'y': 800})
            data['block'] = list(info.values())
            with open("block%s.json" % num, 'w') as file1:
                json.dump(data, file1, indent=2)
            num += 1

    categories = [
        {"name": "head"},
        {"name": "block"},
        {"name": "normal checkpoint"},
        {"name": "justified checkpoint"},
        {"name": "finalized checkpoint"},
    ]
    tab = Tab()
    for i in range(1, num):
        same_height('block%s.json' % i)
        with open('block%s.json' % i) as f:
            graph_data = json.load(f)
        nodes, edges, text = plot(graph_data)
        tab.add(datazoom_slider(nodes, edges, categories, text), "graph%s" % i)
    tab.render('blockchain.html')
def draw_tab(GPU_NUMS=8, user_name='bobo'):
    """Build a two-tab GPU usage report: a utilization pie plus a detail table.

    :param GPU_NUMS: total GPU count passed to the pie chart
    :param user_name: user whose GPU processes are queried
    :return: pyecharts Tab with both views
    """
    gpu_summary_log = QN.query_GPU_status("nvidia-smi", user_name, "PID")
    info_columns = ['PID', 'GPU', 'PID_PATH', 'MEM']
    gpu_df = pd.DataFrame(gpu_summary_log, columns=info_columns)
    user_stat, gpu_util_stat, all_gpu_info = QN.status_analysis(gpu_df)

    tab = Tab()
    tab.add(P.draw_pie_chart(gpu_util_stat, GPU_count=GPU_NUMS), "显卡资源利用情况")
    tab.add(table_base(info_columns, gpu_df.values.tolist()), "显卡资源使用明细")
    return tab
def visualize_heatmaps(model, image, layer_name, jupyter=True, path='visualize_heatmaps.html'):
    """network layer visualize.

    Args:
        model: a tf.keras model or keras model.
        image: a image array with shape (1, height, width, channel), height and width are necessary.
        layer_name: a list of model layers name.
        jupyter: if plot in jupyter, default True.
        path: if jupyter is False, result save a html file.
    Returns:
        A pyecharts polt object.
    """
    # Recreate a clean scratch directory for the per-layer heatmap PNGs.
    if tf.io.gfile.exists('feature_map'):
        tf.io.gfile.rmtree('feature_map')
    tf.io.gfile.makedirs('feature_map')
    # Normalize the input to rank 4: (batch, height, width, channel).
    if np.ndim(image)==2:
        image = tf.expand_dims(tf.expand_dims(image, 0), -1)
    elif np.ndim(image)==3:
        image = tf.expand_dims(image, 0)
    elif np.ndim(image)==4:
        pass
    else:
        raise ValueError('image shape is error.')
    # Backend function returning only the requested layers' outputs.
    temp_model = tf.keras.backend.function(model.inputs, [i.output for i in model.layers if i.name in layer_name])
    temp_name = [i.name for i in model.layers if i.name in layer_name]
    result = temp_model(image)
    images_per_row = 16  # NOTE(review): unused in this function — confirm leftover
    count = 0
    name_dict = {}  # layer name -> saved PNG path
    tab = Tab()
    for feature, name in zip(result, temp_name):
        # Only rank-4 (batch, h, w, c) activations are visualized.
        if feature.ndim==4:
            if model.get_layer(name).__class__.__name__=='InputLayer':
                # The input layer is shown as the raw image itself.
                out = np.squeeze(image, 0).astype('uint8')
            else:
                # Collapse channels by summed absolute activation, resize to
                # the input's spatial size, invert to 0-255 intensity…
                out = tf.image.resize(tf.expand_dims(tf.squeeze(tf.reduce_sum(tf.abs(feature), axis=-1)), axis=-1), (image.shape[1], image.shape[2]))
                out = 255-tf.cast(out/tf.reduce_max(out)*255., tf.uint8)
                # print(np.squeeze(image, 0).shape)
                # …then blend a JET colormap of the heatmap (70%) over the
                # original image (30%).
                out = addWeighted(applyColorMap(out.numpy(), COLORMAP_JET), 0.7, np.squeeze(image, 0).astype('uint8'), 0.3, 0)
                # out = np.expand_dims(out, axis=-1)
            # Save the overlay and add it as one tab, labelled with the layer name.
            name_dict[name] = f'./feature_map/{count}.png'
            ImageAug(out).save_image(name_dict[name])
            tab.add(image_base(name_dict[name], name, 'shape='+str(feature.shape)), name)
            count += 1
    # In-notebook HTML when jupyter=True, otherwise render to `path`.
    return tab.render_notebook() if jupyter else tab.render(path)
def plot_summary(self, page_name='', page_type='page', web_open=False, html_name=None):
    """Render all collected charts (self.datas) into a single html file.

    :param page_name: browser page title
    :param page_type: 'page' for single-page multi-chart, 'tab' for tabbed
    :param web_open: open the rendered file in the default browser when True
    :param html_name: optional output file stem; '.html' is appended and
        self.html_name is updated before rendering
    :raises ValueError: if page_type is neither 'page' nor 'tab'
        (was an assert, which is stripped under ``python -O``)
    :return: None
    """
    if page_type not in ('page', 'tab'):
        raise ValueError('仅支持单页多图(page)和选项卡多图(tab)')
    if html_name:
        self.html_name = os.path.abspath(str(html_name) + '.html')
    if 'page' == page_type:
        # single page, multiple charts stacked vertically
        page = Page(page_title=page_name)
        for idata in self.datas:
            page.add(idata)
        page.render(self.html_name)
    else:
        # tabbed layout: one tab per chart
        tab = Tab(page_title=page_name, )
        for idata in self.datas:
            # each chart's own title text becomes its tab label
            tab_name = idata.options.get('title').opts[0].get('text')
            tab.add(idata, tab_name)
        tab.render(self.html_name)
    if web_open:
        webbrowser.open(url='file://' + self.html_name, new=0, autoraise=True)