def syn_community1v2(args, writer=None, export_graphs=False):
    """Benchmark task: classify single-community BA graphs (label 0)
    against 2-community BA graphs (label 1).

    Args:
        args: parsed argument namespace (input_dim, method, and the model
            hyper-parameters read below).
        writer: optional summary writer forwarded to ``train``.
        export_graphs: when True, render a 4x4 sample figure per class.
    """
    # Class 0: plain Barabasi-Albert graphs with constant node features.
    const_feats = featgen.ConstFeatureGen(np.ones(args.input_dim, dtype=float))
    graphs1 = datagen.gen_ba(range(40, 60), range(4, 5), 500, const_feats)
    for g in graphs1:
        g.graph['label'] = 0
    if export_graphs:
        util.draw_graph_list(graphs1[:16], 4, 4, 'figs/ba')

    # Class 1: two BA communities joined with inter-community edge prob 0.3.
    graphs2 = datagen.gen_2community_ba(
        range(20, 30), range(4, 5), 500, 0.3,
        [featgen.ConstFeatureGen(np.ones(args.input_dim, dtype=float))])
    for g in graphs2:
        g.graph['label'] = 1
    if export_graphs:
        util.draw_graph_list(graphs2[:16], 4, 4, 'figs/ba2')

    graphs = graphs1 + graphs2
    (train_dataset, val_dataset, test_dataset,
     max_num_nodes, input_dim, assign_input_dim) = prepare_data(graphs, args)

    # Build the encoder selected on the command line.
    if args.method == 'soft-assign':
        print('Method: soft-assign')
        model = encoders.SoftPoolingGcnEncoder(
            max_num_nodes, input_dim, args.hidden_dim, args.output_dim,
            args.num_classes, args.num_gc_layers, args.hidden_dim,
            assign_ratio=args.assign_ratio, num_pooling=args.num_pool,
            bn=args.bn, linkpred=args.linkpred,
            assign_input_dim=assign_input_dim).cuda()
    elif args.method == 'base-set2set':
        print('Method: base-set2set')
        model = encoders.GcnSet2SetEncoder(
            input_dim, args.hidden_dim, args.output_dim, 2,
            args.num_gc_layers, bn=args.bn).cuda()
    else:
        print('Method: base')
        model = encoders.GcnEncoderGraph(
            input_dim, args.hidden_dim, args.output_dim, 2,
            args.num_gc_layers, bn=args.bn).cuda()

    train(train_dataset, model, args, val_dataset=val_dataset,
          test_dataset=test_dataset, writer=writer)
def syn_community2hier(args, writer=None):
    """Benchmark task: 3-way classification of hierarchical community graphs.

    Classes: 2x4 hierarchy of BA communities (label 0), 3x3 hierarchy
    (label 1), and flat 2-community BA graphs (label 2).

    Args:
        args: parsed argument namespace (input_dim, method, and the model
            hyper-parameters read below).
        writer: optional summary writer forwarded to ``train``.

    Raises:
        ValueError: if ``args.method`` is not 'soft-assign' (only that
            encoder is wired up for this task).
    """
    # data
    feat_gen = [featgen.ConstFeatureGen(np.ones(args.input_dim, dtype=float))]
    graphs1 = datagen.gen_2hier(1000, [2, 4], 10, range(4, 5), 0.1, 0.03, feat_gen)
    graphs2 = datagen.gen_2hier(1000, [3, 3], 10, range(4, 5), 0.1, 0.03, feat_gen)
    graphs3 = datagen.gen_2community_ba(range(28, 33), range(4, 7), 1000, 0.25, feat_gen)
    for G in graphs1:
        G.graph['label'] = 0
    for G in graphs2:
        G.graph['label'] = 1
    for G in graphs3:
        G.graph['label'] = 2
    graphs = graphs1 + graphs2 + graphs3
    train_dataset, val_dataset, test_dataset, max_num_nodes, input_dim, assign_input_dim = prepare_data(
        graphs, args)

    if args.method == 'soft-assign':
        print('Method: soft-assign')
        model = encoders.SoftPoolingGcnEncoder(
            max_num_nodes, input_dim, args.hidden_dim, args.output_dim,
            args.num_classes, args.num_gc_layers, args.hidden_dim,
            assign_ratio=args.assign_ratio, num_pooling=args.num_pool,
            bn=args.bn, linkpred=args.linkpred, args=args,
            assign_input_dim=assign_input_dim).cuda()
    else:
        # Previously any other method fell through with `model` unbound and
        # crashed at train() with an opaque NameError; fail fast instead.
        raise ValueError(
            "syn_community2hier only supports method 'soft-assign', got: "
            + str(args.method))

    train(train_dataset, model, args, val_dataset=val_dataset,
          test_dataset=test_dataset, writer=writer)
# ---- Synthetic dataset: BA graphs (label 0) vs. 2-community BA graphs (label 1) ----
input_dim = 10

# Class 0: plain Barabasi-Albert graphs with constant node features.
n_range = range(40, 60)
m_range = range(4, 5)
num_graphs = 500
feature_generator = featgen.ConstFeatureGen(np.ones(input_dim, dtype=float))
graphs1 = datagen.gen_ba(n_range, m_range, num_graphs, feature_generator)
for g in graphs1:
    g.graph['label'] = 0

# Class 1: two BA communities joined by sparse inter-community edges.
n_range = range(20, 30)
m_range = range(4, 5)
num_graphs = 500
inter_prob = 0.3
feature_generators = [featgen.ConstFeatureGen(np.ones(input_dim, dtype=float))]
graphs2 = datagen.gen_2community_ba(n_range, m_range, num_graphs, inter_prob,
                                    feature_generators)
for g in graphs2:
    g.graph['label'] = 1

graphs = graphs1 + graphs2

# prepare_data: shuffle in place, then split 80/10/10 into train/val/test.
random.shuffle(graphs)
train_ratio = 0.8
test_ratio = 0.1
train_idx = int(len(graphs) * train_ratio)
test_idx = int(len(graphs) * (1 - test_ratio))
train_graphs = graphs[:train_idx]
val_graphs = graphs[train_idx:test_idx]
test_graphs = graphs[test_idx:]