# Render the senator-similarity section: a static matplotlib/networkx PNG,
# an interactive PyVis graph embedded via streamlit components, a map, and
# the per-senator similarity table.
# NOTE(review): relies on G, senators_plot, df, show_map defined earlier in
# the file — confirm they are in scope at this point.

# Static layout/plot of the weighted graph, saved to disk.
pos = nx.spring_layout(G, weight = 'weight')
nx.draw_networkx(G, pos)
pyl.savefig('bof.png')
#st.image('bof.png')

# Interactive PyVis version of the same graph.
nt = Network('750px', '750px', notebook=True, heading = "")
nt.from_nx(G)
nt.force_atlas_2based()
options = {
    "nodes": {
        "shape": "dot",
        "size": 15,
        "font": {
            "size": 16
        },
        "borderWidth": 2
    }
}
nt.options = options
nt.show('bof.html')

# Embed the generated HTML in the streamlit page.
# Use a context manager so the file handle is closed (the original leaked it).
with open('bof.html', 'r') as html_file:
    source_code = html_file.read()
components.html(source_code, height = 800, width = 800)

show_map(senators_plot)

st.subheader('Table comparing the similarity of tweets to all the other senators')
st.markdown("Uses 3 different metrics - Spacy similarity (using word vectors), Cosine similarity, and Euclidean Distance to compare the senator to others.")
st.dataframe(df) # Same as st.write(df)
def CreateGraph(self, graph_type, overlaps):
    """Build and display a similarity graph from pairwise overlaps.

    Parameters
    ----------
    graph_type : str
        'NetworkX' renders a static matplotlib plot; 'PyVis' (any other
        value takes the PyVis node-building branch) renders an interactive
        hierarchical HTML graph saved as SimilarityVisualizationGraph.html.
    overlaps : dict
        Maps a source label to an iterable of (target, score) pairs; the
        edge weight is derived as (1 - score) / 25.
    """
    # Collect rows first and build the DataFrame once: calling
    # DataFrame.append in a loop is O(n^2) and the method was deprecated in
    # pandas 1.4 and removed in 2.0.
    rows = []
    for source in overlaps:
        for target, score in ((pair[0], pair[1]) for pair in overlaps[source]):
            rows.append({
                'Source': source,
                'Target': target,
                'Type': 'directed',
                'Weight': ((1 - score) / 25),
            })
    overlap_frame = pd.DataFrame(rows, columns=['Source', 'Target', 'Type', 'Weight'])

    net = Network(height='100%', width='100%', directed=True)
    graph = nx.DiGraph()
    edge_data = zip(overlap_frame['Source'], overlap_frame['Target'],
                    overlap_frame['Weight'])
    for index, (src, dst, w) in enumerate(edge_data):
        if graph_type == 'NetworkX':
            graph.add_node(src)
            graph.add_node(dst)
            # NOTE(review): attribute name 'weights' preserved from the
            # original; networkx convention is 'weight' — confirm before
            # running any weighted algorithm on this graph.
            graph.add_edge(src, dst, weights=w)
        else:
            net.add_node(src, src, title=src, physics=False, group=index,
                         arrowStrikethrough=False)
            net.add_node(dst, dst, title=dst, physics=False, group=index,
                         arrowStrikethrough=False)
            net.add_edge(src, dst, value=w, physics=False)

    if graph_type == 'PyVis':
        # Hierarchical layout options for the interactive view.
        options = {
            'layout': {
                'hierarchical': {
                    'enabled': True,
                    'levelSeparation': 50,
                    'treeSpacing': 75,
                    'nodeSpacing': 500,
                    'edgeMinimization': False
                }
            }
        }
        net.options = options
        # Size and annotate each node by its neighbor count.
        connections = net.get_adj_list()
        for node in net.nodes:
            node['size'] = len(connections[node['id']]) / 3
            node['title'] += ' Neighbors: <br>' + '<br>'.join(
                connections[node['id']])
            node['value'] = len(connections[node['id']])
        net.from_nx(graph)
        net.show('SimilarityVisualizationGraph.html')
    else:
        # Static rendering: node size scales with degree.
        degrees = [val * 10 for (node, val) in graph.degree()]
        pos = nx.circular_layout(graph)
        nx.draw(graph, pos, node_size=degrees, with_labels=True, font_size=8)
        plt.show()
def graphMaker(filename):
    """Render an interactive PyVis graph of related YouTube videos.

    Reads edge data from graphs/<filename>.csv (columns Source/Target hold
    video ids), resolves ids to titles/stats from the CA/US/GB trending CSVs,
    and writes "Visual Analysis for <filename> Videos on YouTube.html".
    """
    graphObj = Network(height="100%", width="100%", bgcolor="#222222",
                       font_color="white", heading="")
    graphObj.barnes_hut()
    graphObj.options = {
        "nodes": {
            "borderWidth": 2,
            "borderWidthSelected": 3,
            "color": {
                "border": "rgba(186,182,181,1)",
                "background": "rgba(247,27,0,1)",
                "highlight": {
                    "border": "rgba(196,194,189,1)",
                    "background": "rgba(255,29,0,1)"
                },
                "hover": {
                    "border": "rgba(184,187,188,1)",
                    "background": "rgba(255,55,0,1)"
                }
            }
        },
        "edges": {
            "arrowStrikethrough": False,
            "color": {
                "inherit": True,
                "opacity": 0.35
            },
            "font": {
                "strokeWidth": 28
            },
            "smooth": False
        },
        "physics": {
            "barnesHut": {
                "gravitationalConstant": -80000,
                "springLength": 250,
                "springConstant": 0.001
            },
            "minVelocity": 0.75
        }
    }  # Default display options for graph

    data = pandas.read_csv("graphs/" + filename + ".csv")
    extraDataCA = pandas.read_csv("data/CAvideos.csv")
    extraDataUSA = pandas.read_csv("data/USvideos.csv")
    extraDataGB = pandas.read_csv("data/GBvideos.csv")

    # Row-parallel vectors across the three regional datasets.
    vectorLikes = np.concatenate(
        (extraDataGB['likes'], extraDataUSA['likes'], extraDataCA['likes']))
    vectorDislikes = np.concatenate(
        (extraDataGB['dislikes'], extraDataUSA['dislikes'],
         extraDataCA['dislikes']))
    vectorComments = np.concatenate(
        (extraDataGB['comment_count'], extraDataUSA['comment_count'],
         extraDataCA['comment_count']))
    vectorViews = np.concatenate(
        (extraDataGB['views'], extraDataUSA['views'], extraDataCA['views']))
    vectorTitles = np.concatenate(
        (extraDataGB['title'], extraDataUSA['title'], extraDataCA['title']))
    vectorID = np.concatenate(
        (extraDataGB['video_id'], extraDataUSA['video_id'],
         extraDataCA['video_id']))

    # video id -> title (later rows overwrite earlier duplicates).
    TitleMap = {}
    for vid, title in zip(vectorID, vectorTitles):
        TitleMap[vid] = title

    # title -> display stats for the hover tooltip.
    # BUG FIX: the original iterated range(len(TitleMap)) — the number of
    # *unique* ids — while indexing the row-parallel vectors, so rows past
    # that bound never populated InfoMap and the node-annotation loop below
    # could raise KeyError. Iterate every row instead.
    InfoMap = {}
    for i in range(len(vectorID)):
        views = " Views: " + str(vectorViews[i])
        likes = " Likes: " + str(vectorLikes[i])
        dislikes = " Dislikes: " + str(vectorDislikes[i])
        comments = " Number of Comments: " + str(vectorComments[i])
        InfoMap[TitleMap[vectorID[i]]] = [views, likes, dislikes, comments]  #RENAME IF NECESSARY

    # Add one node per endpoint (keyed by title) and an edge per CSV row.
    for src_id, dst_id in zip(data['Source'], data['Target']):
        fromNode = TitleMap[src_id]
        toNode = TitleMap[dst_id]
        graphObj.add_node(fromNode, fromNode, title=fromNode)
        graphObj.add_node(toNode, toNode, title=toNode)
        graphObj.add_edge(fromNode, toNode)

    # Attach the stats to each node's tooltip.
    for node in graphObj.nodes:
        node["title"] += "\n<br>" + "<br>".join(InfoMap[node["id"]])
        node["value"] = 4

    graphObj.show("Visual Analysis for " + filename + " Videos on YouTube.html")