def nodes_vs_edges_over_time():
    """Track cumulative graph growth, one day at a time.

    For each day in ``graphgen._days``, extends a cumulative MultiDiGraph
    with that day's transaction slice, records the running node and edge
    counts, and finally saves both series via ``utils.save_lists`` under
    the "nodes_vs_edges" stamp.

    Side effects: prints progress per day; writes output via utils.
    """
    num_nodes = []
    num_edges = []
    graph = nx.MultiDiGraph()
    for end_day in graphgen._days:
        # Day stamps appear to be YYYYMMDD; appending HHMMSS yields full
        # timestamps covering the whole day (000000 through 235959).
        start = int(str(end_day) + "000000")
        end = int(str(end_day) + "235959")
        graph = graphgen.add_slice_to_graph(graph, start, end)
        nodes, edges = graphtools.get_num_nodes_edges_from_graph(graph)
        num_nodes.append(nodes)
        num_edges.append(edges)
        print(nodes, edges)  # progress output
    utils.save_lists(num_nodes, num_edges, "nodes_vs_edges")
def all_computations():
    """Compute per-slice statistics over model-generated graphs.

    For every weighted time slice, builds a graph via ``cv_from_btc`` and
    records: node/edge counts, average out-degree, and the fraction of
    nodes in the giant connected component. Empty graphs are skipped.
    Results are saved via ``utils`` under "*_model" stamps.

    Side effects: prints progress per slice; writes output via utils.
    """
    slices = graphgen.generate_weighted_time_slices()
    avg_out_degrees = {}
    frac_nodes_in_gcc = {}
    num_nodes = []
    num_edges = []
    for i, (start, end) in enumerate(slices):
        print(i, " slices out of ", len(slices))  # progress output
        g = cv_from_btc(start, end)
        if len(g) == 0:
            # Nothing generated for this slice; skip it entirely.
            continue
        num_nodes.append(g.number_of_nodes())
        num_edges.append(g.number_of_edges())
        # Keyed by slice start time so the maps can be plotted over time.
        avg_out_degrees[start] = graphtools.get_avg_out_degree_from_graph(g)
        frac_nodes_in_gcc[start] = graphtools.get_frac_nodes_in_gcc_from_graph(g)
    utils.save_node_map(avg_out_degrees, "avg_out_degrees_model")
    utils.save_node_map(frac_nodes_in_gcc, "frac_nodes_in_gcc_model")
    utils.save_lists(num_nodes, num_edges, "nodes_vs_edges_model")
""" The purpose of this file is to aggregate data from the bitcoin network over time """ import sys import utils import networkx as nx import graphgen import graphtools import tags_over_time import forest_fire from networkx.algorithms import * from model import * ''' _HMS = 1000000 slices = graphgen.generate_weighted_time_slices() i = 0 dates = [] avg_clust = [] lccs = [] largest_scc = [] dates = [] avg_clust_p = [] lccs_p = [] for start, end in slices: g = graphgen.get_graph_slice(start * _HMS, end * _HMS) if len(g) == 0: continue rg = nx.gnm_random_graph(g.number_of_nodes(), g.number_of_edges(), directed=True) pfg = nx.barabasi_albert_graph(g.number_of_nodes(), g.number_of_edges()/g.number_of_nodes()) stamp = str(start) + '_' + str(end)
lccs = [] largest_scc = [] for start, end in slices: g = graphgen.get_graph_slice(start * _HMS, end * _HMS) if len(g) == 0: continue # stamp = str(start) + '_' + str(end) #tags_over_time.user_transaction_frequency(g, stamp) # tags_over_time.user_transaction_amount(g, stamp) # tags_over_time.user_buy_frequency(g, stamp) # tags_over_time.user_sell_frequency(g, stamp) # d = assortativity.average_degree_connectivity(g) # utils.save_node_map(d, stamp) deg_connectivity.append(assortativity.average_degree_connectivity(g)) undir_g = nx.Graph(g.copy()) avg_clust.append(nx.average_clustering(undir_g)) lccs.append(len(graphtools.get_lcc_from_graph(g))) largest_scc.append(len(graphtools.get_sccs_from_graph(g)[0])) dates.append(start) # hack for now to save time... print 'finished %s tag over time' % i i += 1 utils.save_lists(dates, deg_connectivity, stamp='deg_conn') utils.save_lists(dates, avg_clust, stamp='avg_clust') utils.save_lists(dates, lccs, stamp='lccs') utils.save_lists(dates, largest_scc, stamp='largest_scc') # plot single values over time # Gini coefficient (way to get single number from distribution)
# stamp = str(start) + '_' + str(end) #tags_over_time.user_transaction_frequency(g, stamp) # tags_over_time.user_transaction_amount(g, stamp) # tags_over_time.user_buy_frequency(g, stamp) # tags_over_time.user_sell_frequency(g, stamp) # d = assortativity.average_degree_connectivity(g) # utils.save_node_map(d, stamp) deg_connectivity.append(assortativity.average_degree_connectivity(g)) undir_g = nx.Graph(g.copy()) avg_clust.append(nx.average_clustering(undir_g)) lccs.append(len(graphtools.get_lcc_from_graph(g))) largest_scc.append(len(graphtools.get_sccs_from_graph(g)[0])) dates.append(start) # hack for now to save time... print 'finished %s tag over time' % i i += 1 utils.save_lists(dates, deg_connectivity, stamp='deg_conn') utils.save_lists(dates, avg_clust, stamp='avg_clust') utils.save_lists(dates, lccs, stamp='lccs') utils.save_lists(dates, largest_scc, stamp='largest_scc') # plot single values over time # Gini coefficient (way to get single number from distribution)