def plot_values_by_year(values, dates, labels, show=False, outjson=None, outimg=None):
    """Plot values (counts or proportions) by year as a stacked bar chart.

    :param values: 2-D values; rows are keyed by ``dates``, columns by ``labels``
    :param dates: row index (the years)
    :param labels: one column label per stacked series
    :param show: if True, display the chart after configuring it
    :param outjson: optional output path forwarded to ``save_plot``
    :param outimg: optional image path forwarded to ``save_plot``
    :return: the configured vincent ``StackedBar`` plot
    """
    frame = pd.DataFrame(values, index=dates, columns=labels)
    plot = StackedBar(frame, height=600, width=600)
    plot.colors(brew='Set1')
    # Legend lists the series reversed so it lines up with the stack order.
    plot.legends.append(Legend(values=labels[::-1], fill='color'))
    plot.legends[0].properties = LegendProperties(size=ValueRef(value=18))
    # Slant and shift the x-axis labels so the year strings stay readable.
    plot.axes[0].properties = AxisProperties(labels=PropertySet(
        angle=ValueRef(value=-45),
        dx=ValueRef(value=-20),
        font_size=ValueRef(value=18),
    ))
    plot.scales['y'].domain = [0, 1]
    if show:
        plot.display()
    save_plot(plot, outjson, outimg)
    return plot
def set_properties(bar, x_label='', y_label="Freq", padding=20):
    """Apply the shared size, titles, and axis styling to a vincent bar chart.

    :param bar: a vincent chart object mutated in place
    :param x_label: title for the x axis
    :param y_label: title for the y axis
    :param padding: horizontal offset applied to the x-axis tick labels
    """
    bar.height = 300
    bar.width = 600
    bar.axis_titles(x=x_label, y=y_label)
    label_props = PropertySet(
        angle=ValueRef(value=70),
        dx=ValueRef(value=padding),
        font_size=ValueRef(value=11),
        font=ValueRef(value="Tahoma, Helvetica"),
    )
    title_props = PropertySet(dy=ValueRef(value=40))
    bar.axes[0].properties = AxisProperties(labels=label_props,
                                            title=title_props)
    # Leave a little room between the bars.
    bar.scales['x'].padding = 0.2
def get_vincent_bar_chart(self, target_df, idx):
    """Build a vincent bar chart of the 20 largest categories in row *idx*.

    :param target_df: DataFrame whose row ``idx`` holds category counts
    :param idx: row label to summarize
    :return: the chart serialized as a Vega JSON string
    """
    row = target_df.loc[idx, :]
    total = row.sum()
    # Keep only the 20 biggest categories, largest first.
    top20 = pd.DataFrame({'count': row.sort_values(ascending=False)[:20]})
    chart = vincent.Bar(top20['count'])
    chart.axes[0].properties = AxisProperties(labels=PropertySet(
        angle=ValueRef(value=45),
        align=ValueRef(value='left')))
    chart.axis_titles(
        x='', y='Count of Main 20 Category / Total : {}'.format(int(total)))
    chart.width = 300
    chart.height = 170
    return chart.to_json()
def __email_summary(self):
    """Background loop: periodically collect monitor samples and email a report.

    Runs forever. Every ``email.interval`` seconds it reads all cpu/mem
    samples recorded before "now" out of the database, deletes them, and
    sends either an 'advanced' report (Vega area charts referenced as PNG
    attachments) or a simple top-N table, depending on the configured
    ``email_type``.  Python 2 code: uses ``xrange`` and print statements.
    """
    email_interval = self.__config['email']['interval']
    email_subject = '服务器监控数据'
    while True:
        time.sleep(email_interval)
        now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        # Per-table accumulator: parallel lists of samples and timestamps.
        stat_data = {
            'cpu_stat': {
                'used_percent': [],
                'created_at': []
            },
            'mem_stat': {
                'used_percent': [],
                'created_at': []
            }
        }
        for table_name in stat_data.keys():
            # NOTE(review): SQL is built by string interpolation; table names
            # come from the local dict so injection is not possible here, but
            # parameterized queries would be more robust.
            for_select = 'SELECT used_percent, created_at FROM %s WHERE created_at < "%s" ORDER BY created_at' \
                % (table_name, now)
            for row in self.__db.query(for_select):
                stat_data[table_name]['used_percent'].append(row[0])
                stat_data[table_name]['created_at'].append(row[1])
            # Delete the rows just summarized so they are only reported once.
            for_delete = 'DELETE FROM %s WHERE created_at < "%s"' % (
                table_name, now)
            self.__db.execute(for_delete)
        # cpu
        cpu_stat_data = self.__data_preprocess(stat_data['cpu_stat'])
        # memory
        mem_stat_data = self.__data_preprocess(stat_data['mem_stat'])
        cpu_data_count = len(cpu_stat_data['created_at'])
        mem_data_count = len(mem_stat_data['created_at'])
        if self.__config['email_type'] == 'advanced':
            # (translated) The details of generating the plotting JSON data
            # still need further study.
            for index in xrange(cpu_data_count):
                cpu_stat_data['created_at'][index] = datetime.strptime(
                    cpu_stat_data['created_at'][index], '%Y-%m-%d %H:%M:%S')
            series = pandas.Series(cpu_stat_data['used_percent'],
                                   index=cpu_stat_data['created_at'])
            cpu_graph = vincent.Area(series)
            cpu_graph.axis_titles(x=u'Time', y=u'Usage (%)')
            ax = AxisProperties(labels=PropertySet(angle=ValueRef(
                value=-30)))
            cpu_graph.axes[0].properties = ax
            cpu_graph_json = cpu_graph.to_json()
            for index in xrange(mem_data_count):
                mem_stat_data['created_at'][index] = datetime.strptime(
                    mem_stat_data['created_at'][index], '%Y-%m-%d %H:%M:%S')
            series = pandas.Series(mem_stat_data['used_percent'],
                                   index=mem_stat_data['created_at'])
            mem_graph = vincent.Area(series)
            mem_graph.axis_titles(x=u'Time', y=u'Usage (%)')
            ax = AxisProperties(labels=PropertySet(angle=ValueRef(
                value=-30)))
            mem_graph.axes[0].properties = ax
            mem_graph_json = mem_graph.to_json()
            email_content = self.__render_content(
                template_name='templates/monitor_stat.html',
                data={
                    'cpu_stat': {
                        'data': cpu_graph_json,
                        'target_file_name': 'cpu_graph.png'
                    },
                    'mem_stat': {
                        'data': mem_graph_json,
                        'target_file_name': 'mem_graph.png'
                    }
                })
            if email_content is None:
                # Template rendering failed; give up on the loop entirely.
                print u'模板渲染失败!'
                break
            print self.__email_it(subject=email_subject,
                                  content=email_content,
                                  attach_files=[{
                                      'file_name': 'cpu_graph.png',
                                      'file_id': 'cpu_stat'
                                  }, {
                                      'file_name': 'mem_graph.png',
                                      'file_id': 'mem_stat'
                                  }])
        else:
            # (translated; original had a typo 娶 for 取) Take the 5 largest.
            max_n = 5
            cpu_data_tuples = [(cpu_stat_data['created_at'][index],
                                cpu_stat_data['used_percent'][index])
                               for index in xrange(cpu_data_count)]
            cpu_data_sorted = sorted(cpu_data_tuples,
                                     key=lambda item: item[1])
            cpu_sorted_max = []
            # NOTE(review): with fewer than max_n samples the list stays
            # empty instead of using what is available — confirm intended.
            if cpu_data_count >= max_n:
                cpu_sorted_max.extend(cpu_data_sorted[0 - max_n:])
            mem_data_tuples = [(mem_stat_data['created_at'][index],
                                mem_stat_data['used_percent'][index])
                               for index in xrange(mem_data_count)]
            mem_data_sorted = sorted(mem_data_tuples,
                                     key=lambda item: item[1])
            mem_sorted_max = []
            if mem_data_count >= max_n:
                mem_sorted_max.extend(mem_data_sorted[0 - max_n:])
            email_content = self.__render_content(
                template_name='templates/monitor_stat.html',
                data={
                    'max_n': max_n,
                    'cpu_stat': {
                        'max_n_data': cpu_sorted_max
                    },
                    'mem_stat': {
                        'max_n_data': mem_sorted_max
                    }
                })
            if email_content is None:
                # Template rendering failed; give up on the loop entirely.
                print u'模板渲染失败!'
                break
            print self.__email_it(subject=email_subject,
                                  content=email_content)
def create_map(results, cities, statename, time):
    """Build a folium map of state/city sentiment and save it to HTML.

    Writes one Vega JSON per city under data/, then renders state polygons
    (coloured for California/Texas when ``time == "after"``) and city
    polygons coloured by which candidate scored higher, each with a popup
    chart.  Saves the map as ``sentiment_<time>.html``.  Python 2 code
    (uses ``dict.iteritems()``).
    """
    # Gets the geoJSON for state and cities calling functions
    transformCities(cities)
    transformStates(statename)
    # Creates the pop-up graph for the city results using the vincent
    # package, looping over the results dictionary.
    for cityid, df in results.iteritems():
        line = vincent.Line(df[['@realDonaldTrump', '@HillaryClinton']])
        line.axis_titles(x='date', y='normalized weighted composite score')
        line.legend(title='Queries')
        line.width = 400
        line.height = 200
        line.axes[0].properties = AxisProperties(
            labels=PropertySet(angle=ValueRef(value=45),
                               align=ValueRef(value='left')))
        line.colors(brew='Set1')
        line.to_json('data/' + cityid + time + '.json')
    # Creates the map outline
    m = folium.Map([34.569728, -106.119447], tiles="Mapbox Bright",
                   zoom_start=5, min_zoom=5)
    fg = folium.map.FeatureGroup().add_to(m)
    # Adds the states looping over the transformed GeoJSON file and sets
    # colour based on time string 'after' for the states of California
    # and Texas.
    geo_json_states = json.load(open('data/us_states/us_states.json'))
    for feature in geo_json_states['features']:
        if time == "after" and feature['properties']['NAME'] == 'California':
            fg.add_child(MultiPolygon(
                _locations_mirror(feature['geometry']['coordinates']),
                color='blue', weight=0))
        elif time == "after" and feature['properties']['NAME'] == 'Texas':
            fg.add_child(MultiPolygon(
                _locations_mirror(feature['geometry']['coordinates']),
                color='red', weight=0))
        else:
            fg.add_child(MultiPolygon(
                _locations_mirror(feature['geometry']['coordinates']),
                color='grey', weight=0))
    # Add the cities layer looping over the transformed GeoJSON file
    geo_json_cities = json.load(open('data/us_cities/us_cities.json'))
    for feature in geo_json_cities['features']:
        # Sets colour variable based on most positive nwcs in the
        # analysis timeframe.
        # NOTE(review): the cities file is reopened on every feature
        # iteration and never closed — a `with open(...)` hoisted above
        # the loop would be safer.
        places = open('data/' + cities, 'r')
        for line in places:
            attr = line.split(";")
            if attr[0] == feature['properties']['NAME10']:
                city_id = attr[1]
                if sum(results[city_id]['@realDonaldTrump']) > sum(
                        results[city_id]['@HillaryClinton']):
                    colour = 'red'
                else:
                    colour = 'blue'
        # Adds the polygon with the set colour
        # NOTE(review): city_id/colour are whatever the last matching line
        # set; if no line matches this feature, values from a previous
        # feature are silently reused (NameError on the very first miss).
        fg.add_child(MultiPolygon(
            _locations_mirror(feature['geometry']['coordinates']),
            color=colour, weight=0,
            popup=folium.Popup(max_width=650).add_child(
                folium.Vega(
                    json.load(open('data/' + city_id + time + '.json')),
                    width=620, height=270)),))
    # Saves the map in html format using the string time in filename
    m.save('sentiment_' + time + '.html')
def data_map():
    """Flask view: score ZIP codes against user preference sliders.

    Reads income/housing/diversity/urbanization percentages plus a state
    code from the query string, computes a 'fit' distance for every ZIP in
    the global ``df_final``, and returns a JSON payload containing a vincent
    choropleth spec (key 0) and the five best-fitting ZIPs (key 1).
    ``state == 'ZZ'`` means the whole-US view; otherwise a single state is
    rendered with a scale derived from the global ``land_area`` ranking.
    """
    return_dict = {}
    income = request.args.get("income")
    housing = request.args.get("housing")
    diversity = request.args.get("diversity")
    urbanization = request.args.get("urbanization")
    state = request.args.get("state")
    statename = request.args.get("statename").replace(" ", "_")
    global df_final
    # Normalize the 0-100 slider values to 0-1 weights.
    fit = []
    fit.append(float(income) / 100)
    fit.append(float(housing) / 100)
    fit.append(float(diversity) / 100)
    fit.append(float(urbanization) / 100)
    # Fit = L1 distance between preferences and each ZIP's indices.
    # NOTE(review): fit[0] comes from "income" but is compared against
    # diversity_index (and fit[2] against income_index) — confirm the
    # ordering is intentional.
    df_final['fit'] = df_final.apply(
        lambda x: abs(fit[0] - x['diversity_index'])
        + abs(fit[1] - x['housing_index'])
        + abs(fit[2] - x['income_index'])
        + abs(fit[3] - x['urban_index']), axis=1)
    if state == 'ZZ':
        # National view: whole-US state and ZIP topojson layers.
        zip_topo = r'/data/zips'
        state_topo = r'/data/states'
        geo_data = [{'name': 'states',
                     'url': state_topo,
                     'feature': 'us_states.geo'},
                    {'name': 'zip_codes',
                     'url': zip_topo,
                     'feature': 'zip_codes_for_the_usa'}]
        vis = vincent.Map(data=df_final, geo_data=geo_data, scale=800,
                          projection='albersUsa', data_bind='fit',
                          data_key='zip5', brew='YlOrRd',
                          map_key={'zip_codes': 'properties.zip'})
        # Strip the update properties from the state layer so only the
        # ZIP layer carries the data binding.
        del vis.marks[0].properties.update
        vis.marks[1].properties.enter.stroke_opacity = ValueRef(value=0.05)
        vis.marks[0].properties.enter.stroke.value = '#C0C0C0'
        vis.legend(title='Preferred ZipCode')
        return_dict[0] = json.loads(vis.to_json())
        # NOTE(review): DataFrame.sort() was removed in pandas 0.20;
        # modern pandas needs sort_values('fit') here.
        ziplist = json.loads(df_final[['ZCTA5', 'ZIPName', 'fit']].sort(
            ['fit']).reset_index().head(5).to_json())
        # Reshape the top-5 rows into a list of row dicts for the client.
        table_data = []
        for i in range(5):
            dict_row = {}
            dict_row['index'] = i
            dict_row['ZCTA5'] = ziplist['ZCTA5'][str(i)]
            dict_row['ZIPName'] = ziplist['ZIPName'][str(i)]
            table_data.append(dict_row)
        return_dict[1] = table_data
        #with open ('data.json','w') as outfile:
        #    json.dump(lst,outfile)
        return json.dumps(return_dict)
    else:
        zip_topo = r'/data/state_map?state=' + statename
        feature_name = statename + r'.geo'
        global land_area
        rank = int(land_area[statename])
        # Map the state's land-area rank to a projection scale so every
        # state roughly fills the viewport.
        # NOTE(review): the (1,3] and (2,19] ranges overlap; ranks 2-3 take
        # the earlier branch, so the second effectively covers 4-19.
        if rank > 0 and rank <= 1:
            scale = 700
        elif rank > 1 and rank <= 3:
            scale = 2500
        elif rank > 2 and rank <= 19:
            scale = 3000
        elif rank > 19 and rank <= 26:
            scale = 4000
        elif rank > 26 and rank <= 39:
            scale = 4500
        elif rank > 39 and rank <= 40:
            scale = 5000
        elif rank > 40 and rank <= 48:
            scale = 6000
        else:
            scale = 23000
        geo_data = [{'name': 'state',
                     'url': zip_topo,
                     'feature': feature_name},
                    {'name': 'zip_codes',
                     'url': zip_topo,
                     'feature': feature_name}]
        vis = vincent.Map(data=df_final[df_final['State'] == state],
                          geo_data=geo_data, scale=scale,
                          projection='equirectangular', data_bind='fit',
                          data_key='zip5', brew='YlOrRd',
                          map_key={'zip_codes': 'id'})
        del vis.marks[0].properties.update
        #vis.marks[0].properties.enter.stroke.value = '#C0C0C0'
        vis.marks[1].properties.enter.stroke_opacity = ValueRef(value=0.5)
        #vis.legend(title='Preferred ZipCode')
        return_dict[0] = json.loads(vis.to_json())
        # NOTE(review): same removed DataFrame.sort() API as above.
        ziplist = json.loads(
            df_final[['ZCTA5', 'ZIPName', 'fit']][df_final['State'] == state]
            .sort(['fit']).reset_index().head(5).to_json())
        table_data = []
        for i in range(5):
            dict_row = {}
            dict_row['index'] = i
            dict_row['ZCTA5'] = ziplist['ZCTA5'][str(i)]
            dict_row['ZIPName'] = ziplist['ZIPName'][str(i)]
            table_data.append(dict_row)
        return_dict[1] = table_data
        return json.dumps(return_dict)
def plot_server_on_map(nodes=None):
    """Create a map of every known node plus a demo latency bar chart.

    Side effects — writes two files:
      * ``vega.json``              Vega spec for the (placeholder) latency chart
      * ``plbmng_server_map.html`` folium map with one marker per node

    :param nodes: iterable of node records (indexable sequences).  The last
        two fields are latitude/longitude, or the string ``'unknown'`` when
        missing; such nodes are skipped.  Defaults to no nodes.
    """
    # Fix: the original iterated the default None and raised TypeError.
    if nodes is None:
        nodes = []
    # Placeholder data for the latency chart (hard-coded demo values).
    df = pd.DataFrame({'Data 1': [1, 2, 3, 4, 5, 6, 7, 12],
                       'Data 2': [42, 27, 52, 18, 61, 19, 62, 33]})
    # Top level Visualization
    vis = Visualization(width=500, height=300)
    vis.padding = {'top': 10, 'left': 50, 'bottom': 50, 'right': 100}
    # Data: key 'Data 2' on 'Data 1'.
    vis.data.append(Data.from_pandas(df, columns=['Data 2'],
                                     key_on='Data 1', name='table'))
    # Scales
    vis.scales.append(Scale(name='x', type='ordinal', range='width',
                            domain=DataRef(data='table', field="data.idx")))
    vis.scales.append(Scale(name='y', range='height', nice=True,
                            domain=DataRef(data='table', field="data.val")))
    # Axes
    vis.axes.extend([Axis(type='x', scale='x'), Axis(type='y', scale='y')])
    # Marks: one rect per row, anchored at y=0.
    enter_props = PropertySet(x=ValueRef(scale='x', field="data.idx"),
                              y=ValueRef(scale='y', field="data.val"),
                              width=ValueRef(scale='x', band=True, offset=-1),
                              y2=ValueRef(scale='y', value=0))
    update_props = PropertySet(fill=ValueRef(value='steelblue'))
    mark = Mark(type='rect', from_=MarkRef(data='table'),
                properties=MarkProperties(enter=enter_props,
                                          update=update_props))
    vis.marks.append(mark)
    vis.axis_titles(x='days', y='latency [ms]')
    vis.to_json('vega.json')
    map_full = folium.Map(location=[45.372, -121.6972], zoom_start=2)
    for node in nodes:
        # Skip nodes without usable coordinates.
        if node[-2] == 'unknown' or node[-1] == 'unknown':
            continue
        lat = float(node[-2])
        lon = float(node[-1])
        text = """
NODE: %s, IP: %s
URL: %s
FULL NAME: %s
LATITUDE: %s, LONGITUDE: %s
""" % (node[2], node[1], node[7], node[8], node[9], node[10])
        popup = folium.Popup(text.strip().replace('\n', '<br>'),
                             max_width=1000)
        folium.Marker([lat, lon], popup=popup).add_to(map_full)
    map_full.save('plbmng_server_map.html')
vis.padding = {'top': 10, 'left': 50, 'bottom': 50, 'right': 100} #Data. We're going to key Data 2 on Data 1 vis.data.append(Data.from_pandas(df, columns=['Data 2'], key_on='Data 1', name='table')) #Scales vis.scales.append(Scale(name='x', type='ordinal', range='width', domain=DataRef(data='table', field="data.idx"))) vis.scales.append(Scale(name='y', range='height', nice=True, domain=DataRef(data='table', field="data.val"))) #Axes vis.axes.extend([Axis(type='x', scale='x'), Axis(type='y', scale='y')]) #Marks enter_props = PropertySet(x=ValueRef(scale='x', field="data.idx"), y=ValueRef(scale='y', field="data.val"), width=ValueRef(scale='x', band=True, offset=-1), y2=ValueRef(scale='y', value=0)) update_props = PropertySet(fill=ValueRef(value='steelblue')) mark = Mark(type='rect', from_=MarkRef(data='table'), properties=MarkProperties(enter=enter_props, update=update_props)) vis.marks.append(mark) vis.axis_titles(x='days', y='latency [ms]') vis.to_json('vega.json') map_full = folium.Map(