def grow_exact_size_hrg_graphs_from_prod_rules(prod_rules, gname, n, runs=1):
    """
  Args:
    rules: production rules (model)
    gname: graph name
    n:     target graph order (number of nodes)
    runs:  how many graphs to generate

  Returns: list of synthetic graphs

  """
    if n <= 0: sys.exit(1)

    g = pcfg.Grammar('S')
    for (id, lhs, rhs, prob) in prod_rules:
        g.add_rule(pcfg.Rule(id, lhs, rhs, prob))

    #print "n", n
    num_nodes = n
    if DEBUG: print "Starting max size"
    g.set_max_size(num_nodes)
    if DEBUG: print "Done with max size"

    hstars_lst = []
    for i in range(0, runs):
        rule_list = g.sample(num_nodes)
        hstar = phrg.grow(rule_list, g)[0]
        hstars_lst.append(hstar)

    return hstars_lst
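
# A minimal usage sketch (not part of the original module): it wires the grower
# above to rules learned the same way ba_control_hrg does below, and assumes the
# same module-level imports (e.g. nx for NetworkX and phrg) are available.
def _example_grow_from_learned_rules():
    G = nx.karate_club_graph()                              # any small connected graph
    rules = phrg.probabilistic_hrg_deriving_prod_rules(G)   # (id, lhs, rhs, prob) rows
    return grow_exact_size_hrg_graphs_from_prod_rules(rules, 'karate',
                                                      G.number_of_nodes(), runs=3)
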
def ba_control_hrg(v_lst):
	grow_graphs = False
	v_lst = [int(n) for n in v_lst]  # node counts for the BA graphs to generate
	data = []
	prules_lst = []
	for n_v in v_lst:
		# nxgobj = nx.barabasi_albert_graph(n_v, np.random.choice(range(1,n_v)))
		nxgobj = nx.barabasi_albert_graph(n_v,3)
		nxgobj.name = "ba_%d_%d" %(nxgobj.number_of_nodes(), nxgobj.number_of_edges())

		print "ba", nxgobj.number_of_nodes(), nxgobj.number_of_edges()
		data.append(nxgobj)
		prod_rules = phrg.probabilistic_hrg_deriving_prod_rules(nxgobj)
		df = pd.DataFrame(list(prod_rules))
		out_base_fname = "ba_cntrl_%d"%(n_v)
		ofname = "Results/" + out_base_fname + ".tsv" #_________________
		df.to_csv(ofname, sep="\t", header=False, index=False)


		prules_lst.append(prod_rules)
		g = pcfg.Grammar('S')
		for (id, lhs, rhs, prob) in df.values:
			g.add_rule(pcfg.Rule(id, lhs, rhs, prob))

		num_nodes = nxgobj.number_of_nodes()

		print "	","Starting max size", 'n=', num_nodes
		g.set_max_size(num_nodes)
		print "	","Done with max size"

		Hstars = []
		num_samples = 10
		for i in range(0, num_samples):
			try:
				rule_list = g.sample(num_nodes)
			except Exception, e:
				print str(e)
				traceback.print_exc()
				continue #sys.exit(1)

			hstar = phrg.grow(rule_list, g)[0]
			Hstars.append(hstar)
		print "	", 'Save BA production rules'



		if os.path.exists(ofname):
				print '\tSaved to disk:',ofname
		if 0:
			metricx = ['degree','clust', 'hop', 'gcd']
			metrics.network_properties([nxgobj], metricx, Hstars, name=nxgobj.name, out_tsv=False)
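
# Hypothetical driver (not part of the original module). Note that the rule
# TSVs are written under Results/, so that directory is expected to exist.
def _example_ba_control():
	ba_control_hrg([100, 500, 1000])
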
def Hstar_Graphs_Control(G, graph_name, axs=None):

    # Derive the production rules in a naive way, ignoring edge timestamps
    prod_rules = phrg.probabilistic_hrg_learning(G)
    pp.pprint(prod_rules)
    g = pcfg.Grammar('S')
    for (id, lhs, rhs, prob) in prod_rules:
        g.add_rule(pcfg.Rule(id, lhs, rhs, prob))

    num_nodes = G.number_of_nodes()

    print "Starting max size", 'n=', num_nodes
    g.set_max_size(num_nodes)

    print "Done with max size"

    Hstars = []

    num_samples = 20
    print '*' * 40
    for i in range(0, num_samples):
        rule_list = g.sample(num_nodes)
        hstar = phrg.grow(rule_list, g)[0]
        Hstars.append(hstar)

    # if 0:
    #   g = nx.from_pandas_dataframe(df, 'src', 'trg', edge_attr=['ts'])
    #   draw_degree_whole_graph(g,axs)
    #   draw_degree(Hstars, axs=axs, col='r')
    #   #axs.set_title('Rules derived by ignoring time')
    #   axs.set_ylabel('Frequency')
    #   axs.set_xlabel('degree')

    if 0:
        # metricx = [ 'degree','hops', 'clust', 'assort', 'kcore','eigen','gcd']
        metricx = ['gcd']
        # g = nx.from_pandas_dataframe(df, 'src', 'trg',edge_attr=['ts'])
        # graph_name = os.path.basename(f_path).rstrip('.tel')
        if DBG: print ">", graph_name
        metrics.network_properties([G],
                                   metricx,
                                   Hstars,
                                   name=graph_name,
                                   out_tsv=True)
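
# Hypothetical call sketch (not part of the original module; 'karate' and the
# helper name are placeholders): regrow HRG graphs for any NetworkX graph via
# the control routine above.
def _example_hstar_control():
    G = nx.karate_club_graph()      # stand-in for a real input graph
    Hstar_Graphs_Control(G, 'karate')
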
def grow_exact_size_hrg_graphs_from_prod_rules(prod_rules, gname, n, runs=1):
    """
	Args:
		rules: production rules (model)
		gname: graph name
		n:     target graph order (number of nodes)
		runs:  how many graphs to generate

	Returns: list of synthetic graphs

	"""
    nslog("grow_exact_size_hrg_graphs_from_prod_rules")
    DBG = True
    if n <= 0: sys.exit(1)

    g = pcfg.Grammar('S')
    for (id, lhs, rhs, prob) in prod_rules:
        g.add_rule(pcfg.Rule(id, lhs, rhs, prob))

    print
    print "Added rules to the HRG grammar (num rules:", len(prod_rules), ", n:", n, ")"

    num_nodes = n
    if DBG: print "Starting max size"
    g.set_max_size(num_nodes)
    if DBG: print "Done with max size"

    hstars_lst = []
    print "  ",
    for i in range(0, runs):
        print '>',
        rule_list = g.sample(num_nodes)
        hstar = phrg.grow(rule_list, g)[0]
        hstars_lst.append(hstar)

    return hstars_lst
# ['r4.0', 'A,B,C,D,E,F,G', ['0,A:T', '0,B:T', '0,F:T', '0,G:T', '0,A,B,C,E,F:N', '0,A,B,C,D,E,F,G:N'], 1.0],
# ['r5.0', 'A,B,C,D,E,F,G,H',  ['0,C:T', '0,D:T', '0,E:T', '0,F:T', '0,G:T', '0,H:T', 'A,B,D,0,E:N'], 0.333333333333],
# ['r5.1', 'A,B,C,D,E,F,G,H',  ['0,A:T', '0,B:T', '0,C:T', '0,E:T', '0,G:T', '0,H:T', '0,A,B,C,D,E,F,H:N'], 0.333333333333],
# ['r5.2', 'A,B,C,D,E,F,G,H',  ['0,D:T', '0,E:T', '0,F:T', 'C,0,F,G,H:N', 'A,B,C,D,0,E,F,G:N'] ,0.333333333333],
# ['r6.0', 'A,B,C,D,E',  ['0,A:T', '0,B:T', '0,C:T', '0,D:T', '0,E:T', 'B,0,A:N'] ,0.25],
# ['r6.1', 'A,B,C,D,E',  ['0,A:T', '0,B:T', '0,C:T', '0,D:T', '0,E:T'] ,0.25],
# ['r6.2', 'A,B,C,D,E',  ['0,B:T', '0,C:T', '0,D:T', '0,E:T', '0,A:T'] ,0.25],
# ['r6.3', 'A,B,C,D,E',  ['0,A:T', '0,B:T', '0,C:T', '0,D:T', '0,B,C,D,E,A:N'] ,0.25],
# ['r7.0', 'A,B,C,D',  ['0,B:T', '0,C:T', '0,D:T', '0,A:T', '0,B,C,D,A:N'], 1.0],
# ['r8.0', 'A,B,C,D,E,F',  ['0,E:T', '0,F:T', '0,A,B,C,D,E,F:N'] ,0.5],
# ['r8.1', 'A,B,C,D,E,F',  ['0,B:T', '0,C:T', '0,D:T', '0,E:T', '0,F:T', '0,A:T', 'A,0,D,E,F:N'] ,0.5]
# ]

g = pcfg.Grammar('S')
for (id, lhs, rhs, prob) in rules:
    g.add_rule(pcfg.Rule(id, lhs, rhs, prob))

print 'Grammar g loaded.'
# Synthetic Graphs
#num_nodes = int(sys.argv[-1])
g.set_max_size(num_nodes)

hStars = []
for i in range(20):
    rule_list = g.sample(num_nodes)
    hstar = phrg.grow(rule_list, g)[0]
    hStars.append(hstar)
    print i, hstar.number_of_nodes(), hstar.number_of_edges()

metricx = ['degree', 'hops', 'clust', 'gcd']
metrics.network_properties([G], metricx, hStars, name=graph_name, out_tsv=True)
	"""
	df = pd.read_csv(in_fname, delimiter='\t', header=None)
	rhs_clean = lambda rhs_rule: [f[1:-1] for f in re.findall("'.+?'", rhs_rule)]
	try:
		df['rhslst'] = df[2].apply(rhs_clean)
	except Exception, e:
		print str(e)
		df['rhslst'] = df['rhs'].apply(rhs_clean)

	df = df[[0, 1, 'rhslst',3]]
	#~#	// ** //
	g = pcfg.Grammar('S')
	for (id, lhs, rhs, prob) in df.values:
		# print (id, lhs, rhs, prob)
		g.add_rule(pcfg.Rule(id, lhs, rhs, float(prob)))
	#
	fbname = os.path.basename(in_fname)
	print fbname.split("_")[1]
	
	#	try:
	#		num_nodes = int(fbname.split("_")[1].strip(".tsv"))
	#	except Exception, e:
	#		print str(e)
	from tdec.load_edgelist_from_dataframe import Pandas_DataFrame_From_Edgelist
	eldf = Pandas_DataFrame_From_Edgelist([orig_el])[0]
	G = nx.from_pandas_dataframe(eldf, source='src', target='trg')
	G.name = [x for x in os.path.basename(orig_el).split(".") if len(x)>3][0]

	num_nodes = G.number_of_nodes()
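
	# Plausible continuation, sketched by analogy with the functions above (the
	# original body is truncated here, so treat the lines below as an assumption):
	# grow HRG graphs of the original graph's order from the reloaded grammar.
	g.set_max_size(num_nodes)

	hStars = []
	for i in range(10):
		try:
			rule_list = g.sample(num_nodes)
		except Exception, e:
			print str(e)
			continue
		hStars.append(phrg.grow(rule_list, g)[0])
	return hStars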