예제 #1
0
    def __init__(self):
        """Initialize the parameters, prepare the data and build the network.

        Reads the edge lists and the pre-trained node embeddings for both the
        discriminator and the generator, constructs (or imports) the BFS trees,
        then builds and initializes the GAN network.
        """
        self.n_node, self.linked_nodes = utils.read_edges(
            config.train_filename, config.test_filename, config.n_node)
        self.discriminator = None
        self.generator = None
        print(self.n_node)  # fixed: was a Python-2 print statement, file otherwise uses print()
        # If the n_node read from the edges is smaller than config.n_node,
        # the graph contains isolated nodes; trust the configured count.
        self.n_node = config.n_node
        self.root_nodes = [i for i in range(self.n_node)]
        print("start reading initial embeddings")
        # read the initial embeddings for the discriminator and the generator
        self.node_embed_init_d = utils.read_emd(
            filename=config.pretrain_emd_filename_d,
            n_node=config.n_node,
            n_embed=config.n_embed)
        self.node_embed_init_g = utils.read_emd(
            filename=config.pretrain_emd_filename_g,
            n_node=config.n_node,
            n_embed=config.n_embed)
        print("finish reading initial embeddings")
        # use the BFS to construct the trees (or load a cached copy)
        print("Constructing Trees")
        if config.import_tree:
            # pickle data is binary: 'rb' (the original text-mode 'r' breaks on Python 3)
            with open(config.tree_path, 'rb') as tree_file:
                self.trees = pickle.load(tree_file)
        else:
            if config.app == "recommendation":
                self.mul_construct_trees_for_recommend(self.user_nodes)
            else:  # classification
                self.mul_construct_trees(self.root_nodes)
            # 'wb' for binary pickle output (original used text mode 'w');
            # `with` also closes the handle the original leaked
            with open(config.tree_path, 'wb') as tree_file:
                pickle.dump(self.trees, tree_file)
        config.max_degree = utils.get_max_degree(self.linked_nodes)
        self.build_gan()
        self.initialize_network()
예제 #2
0
    def __init__(self):
        """Initialize the parameters, prepare the data and build the network.

        Loads the graph edges and the pre-trained embeddings, constructs the
        BFS trees, then instantiates the generator and discriminator and moves
        them to the GPU.
        """
        self.n_node, self.linked_nodes = utils.read_edges(
            config.train_filename, config.test_filename)
        self.root_nodes = [i for i in range(self.n_node)]
        self.discriminator = None
        self.generator = None
        # Explicit check: a bare `assert` is stripped under `python -O`.
        # Same exception type as before so callers are unaffected.
        if self.n_node != config.n_node:
            raise AssertionError(
                "n_node read from edges (%d) != config.n_node (%d)"
                % (self.n_node, config.n_node))
        print("start reading initial embeddings")
        # read the initial embeddings for the discriminator and the generator
        self.node_embed_init_d = utils.read_emd(
            filename=config.pretrain_emd_filename_d,
            n_node=config.n_node,
            n_embed=config.n_embed)
        self.node_embed_init_g = utils.read_emd(
            filename=config.pretrain_emd_filename_g,
            n_node=config.n_node,
            n_embed=config.n_embed)
        print("finish reading initial embeddings")
        # use the BFS to construct the trees
        print("Constructing Trees")
        if config.app == "recommendation":
            self.mul_construct_trees_for_recommend(self.user_nodes)
        else:  # classification
            self.mul_construct_trees(self.root_nodes)
        config.max_degree = utils.get_max_degree(self.linked_nodes)

        # build the PyTorch generator/discriminator and move them to the GPU
        self.generator = Generator(lambda_gen=config.lambda_gen,
                                   node_emd_init=self.node_embed_init_g)
        self.discriminator = Discriminator(
            lambda_dis=config.lambda_dis, node_emd_init=self.node_embed_init_d)
        self.generator.cuda()
        self.discriminator.cuda()
        self.all_score = None  # filled in later; score cache starts empty
예제 #3
0
    def __init__(self):
        """Initialize the parameters, prepare the data and build the network.

        Loads the graph edges and the pre-trained embeddings, constructs the
        BFS trees, builds the GAN graph, and starts an initialized TF session.
        """
        self.n_node, self.linked_nodes = utils.read_edges(
            config.train_filename, config.test_filename)
        self.root_nodes = [i for i in range(self.n_node)]
        self.discriminator = None
        self.generator = None
        # Explicit check: a bare `assert` is stripped under `python -O`.
        # Same exception type as before so callers are unaffected.
        if self.n_node != config.n_node:
            raise AssertionError(
                "n_node read from edges (%d) != config.n_node (%d)"
                % (self.n_node, config.n_node))
        print("start reading initial embeddings")
        # read the initial embeddings for the discriminator and the generator
        self.node_embed_init_d = utils.read_emd(
            filename=config.pretrain_emd_filename_d,
            n_node=config.n_node,
            n_embed=config.n_embed)
        self.node_embed_init_g = utils.read_emd(
            filename=config.pretrain_emd_filename_g,
            n_node=config.n_node,
            n_embed=config.n_embed)
        print("finish reading initial embeddings")
        # use the BFS to construct the trees
        print("Constructing Trees")
        if config.app == "recommendation":
            self.mul_construct_trees_for_recommend(self.user_nodes)
        else:  # classification
            self.mul_construct_trees(self.root_nodes)
        config.max_degree = utils.get_max_degree(self.linked_nodes)
        self.build_gan()
        # TF session setup; allow_growth avoids reserving all GPU memory upfront
        self.config = tf.ConfigProto()
        self.config.gpu_options.allow_growth = True
        # tf.group: a single op that runs all of its inputs
        self.init_op = tf.group(tf.global_variables_initializer(),
                                tf.local_variables_initializer())
        self.sess = tf.Session(config=self.config)
        self.sess.run(self.init_op)
예제 #4
0
    def __init__(self):
        """Load the graphs and embeddings, build the BFS-trees, and set up TF.

        The BFS-trees are cached via pickle at config.cache_filename so that
        repeated runs skip the expensive tree construction.
        """
        print("reading graphs...")
        self.n_node, self.graph = utils.read_edges(config.train_filename,
                                                   config.test_filename)
        self.root_nodes = list(range(self.n_node))

        print("reading initial embeddings...")
        self.node_embed_init_d = utils.read_embeddings(
            filename=config.pretrain_emb_filename_d,
            n_node=self.n_node,
            n_embed=config.n_emb)
        self.node_embed_init_g = utils.read_embeddings(
            filename=config.pretrain_emb_filename_g,
            n_node=self.n_node,
            n_embed=config.n_emb)

        # Construct the BFS-trees, or load them from the pickle cache.
        self.trees = None
        if os.path.isfile(config.cache_filename):
            print("reading BFS-trees from cache...")
            with open(config.cache_filename, 'rb') as cache_file:
                self.trees = pickle.load(cache_file)
        else:
            print("constructing BFS-trees...")
            with open(config.cache_filename, 'wb') as cache_file:
                if config.multi_processing:
                    # NOTE(review): this path presumably sets self.trees as a
                    # side effect -- confirm in construct_trees_with_mp.
                    self.construct_trees_with_mp(self.root_nodes)
                else:
                    self.trees = self.construct_trees(self.root_nodes)
                pickle.dump(self.trees, cache_file)

        print("building GAN model...")
        self.discriminator = None
        # NOTE(review): `generator2` looks like a typo for `generator`;
        # kept as-is to preserve behavior -- verify against the rest of the class.
        self.generator2 = None
        self.build_generator()
        self.build_discriminator()

        self.latest_checkpoint = tf.train.latest_checkpoint(config.model_log)
        self.saver = tf.compat.v1.train.Saver()

        self.config = tf.compat.v1.ConfigProto()
        # NOTE(review): gpu_options.allow_growth was deliberately left disabled here.
        self.init_op = tf.group(tf.compat.v1.global_variables_initializer(),
                                tf.compat.v1.local_variables_initializer())
        self.sess = tf.compat.v1.Session(config=self.config)
        self.sess.run(self.init_op)
예제 #5
0
 def __init__(self):
     """Initialize the parameters, prepare the data and build the network.

     Reads the edge lists and the pre-trained embeddings, constructs the
     BFS trees, builds the GAN graph, and starts an initialized TF session.
     (Leftover step-by-step debug prints were removed.)
     """
     self.n_node, self.linked_nodes = utils.read_edges(
         config.train_filename, config.test_filename)
     self.root_nodes = [i for i in range(self.n_node)]  # root nodes for each node
     self.discriminator = None  # built later in build_gan()
     self.generator = None
     # Explicit check: a bare `assert` is stripped under `python -O`.
     # Same exception type as before so callers are unaffected.
     if self.n_node != config.n_node:
         raise AssertionError(
             "n_node read from edges (%d) != config.n_node (%d)"
             % (self.n_node, config.n_node))
     print("start reading initial embeddings")
     # read the initial embeddings for the discriminator and the generator
     self.node_embed_init_d = utils.read_emd(
         filename=config.pretrain_emd_filename_d,
         n_node=config.n_node,
         n_embed=config.n_embed)
     self.node_embed_init_g = utils.read_emd(
         filename=config.pretrain_emd_filename_g,
         n_node=config.n_node,
         n_embed=config.n_embed)
     print("finish reading initial embeddings")
     # use the BFS to construct the trees
     print("Constructing Trees")
     if config.app == "recommendation":
         self.mul_construct_trees_for_recommend(self.user_nodes)
     else:  # classification
         self.mul_construct_trees(self.root_nodes)
     config.max_degree = utils.get_max_degree(self.linked_nodes)
     self.build_gan()
     # TF session setup; allow_growth avoids reserving all GPU memory upfront
     self.config = tf.ConfigProto()
     self.config.gpu_options.allow_growth = True
     # tf.group: a single op that runs all of its inputs
     self.init_op = tf.group(tf.global_variables_initializer(),
                             tf.local_variables_initializer())
     self.sess = tf.Session(config=self.config)
     self.sess.run(self.init_op)
예제 #6
0
def read_data():
    """Parse the command line and load the node and edge files.

    Returns a ``(nodes, edges)`` tuple; ``nodes`` is an empty dict when the
    optional ``-n`` file is not supplied.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-n',
        dest='file_nodes',
        help='Arquivo em que cada linha deve conter o código do vértice e seu '
        'respectivo nome.')
    parser.add_argument(
        '-e',
        dest='file_edges',
        required=True,
        help=
        'Arquivo em que cada linha deve conter o código do vértice origem e o código '
        'do vértice de destino.')
    args = parser.parse_args()

    # The node-name file is optional; fall back to an empty mapping.
    nodes = (read_nodes(args.file_nodes, delimiter=DELIMITER)
             if args.file_nodes else {})
    edges = read_edges(args.file_edges, delimiter=DELIMITER)
    return nodes, edges
예제 #7
0
def run_badgers(graphspace):
	"""Read the badger edge list; the result is currently discarded.

	`graphspace` is accepted for interface symmetry but not used yet.
	"""
	adj_list, edge_costs = utils.read_edges('badger-edges.txt')
	return
예제 #8
0
def run_yellowstone(graphspace):
	"""Read the yellowstone edge list; the result is currently discarded.

	`graphspace` is accepted for interface symmetry but not used yet.
	"""
	adj_list, edge_costs = utils.read_edges('yellowstone-edges.txt')
	return
예제 #9
0
def run_example(graphspace):
	"""Read the example edge list; the result is currently discarded.

	`graphspace` is accepted for interface symmetry but not used yet.
	"""
	adj_list, edge_costs = utils.read_edges('example-edges.txt')
	return
예제 #10
0
import heapq
from Node import *
from pprint import *
from DynamicKeyHeap import DynamicKeyHeap
        
def merge(u, v, clusters):
    """Merge the cluster of node *v* into the cluster of node *u*.

    Every node in v's cluster is re-labelled with u's leader, the member
    lists are concatenated under the surviving leader, and the dead
    leader's entry is removed from *clusters*.

    Args:
        u: node whose leader survives the merge.
        v: node whose cluster is absorbed.
        clusters: dict mapping each leader id to a list of its member nodes;
            mutated in place.
    """
    dead_leader = v.leader
    surviving_leader = u.leader
    # Guard: the original deleted the (single, live) cluster entry when
    # u and v already shared a leader, corrupting `clusters`.
    if dead_leader == surviving_leader:
        return
    for node in clusters[dead_leader]:
        node.leader = surviving_leader
    clusters[surviving_leader].extend(clusters[dead_leader])
    del clusters[dead_leader]

if __name__ == "__main__":
    # graph = utils.read_graph("clustering1.txt")
    N, edges = utils.read_edges("clustering1.txt")
    
    # first we have each node in it's own cluster - dict key is the leader for each cluster
    clusters = {x:[Node(x, x)] for x in range(1, N + 1)}

    # creates edges list in form (Node,Node,cost) - same node references in the clusters
    # need this so when I update leader (aka cluster) info I get the ref updated
    edges = [[clusters[int(i[0])][0], clusters[int(i[1])][0], int(i[2])] for i in edges]

    sorted_edges = DynamicKeyHeap(edges, lambda edge: edge[2]) # heapify by edge' cost
    
    max_spacing = 0
    while len(clusters) >= 4:
        min_edge = sorted_edges.pop_value() #get min value
        u = min_edge[0]
        v = min_edge[1]