# Exemplo n.º 1
# 0
# Example pipeline: read graph -> Louvain grouping -> landmark sampling ->
# optimizer initialization -> parallel training.
ppt = time.time()  # overall wall-clock start for the whole run

# Output file for the embeddings.
# NOTE(review): opened here but never closed within this excerpt — confirm it
# is closed later (or switch to a `with open(...)` context manager).
# The '\\' separator implies Windows-style paths.
f = open(DATADIR + DATASET + '\\' + FILE_NAME, 'w')

pt = time.time()
# Load the directed graph from links.txt; ORDER/WITHDIAG control proximity
# order and whether self-loops (the diagonal) are kept.
net = Graph(DATADIR + DATASET + '\\links.txt', typ='dir', order=ORDER, withdiag=WITHDIAG)
print('READ TIME: %.2f' % (time.time() - pt))

# Header line: vertex count, edge count, embedding dimension.
f.write('%d %d %d\n' % (net.nVertices, net.nEdges, DIMENSION))

pt = time.time()
# Community detection via Louvain; `merge=MERGE` presumably controls how
# small communities are merged — verify against Louvain.execute.
grouping_model = Louvain(net, rand=RANDOM_GROUPING)
groups = grouping_model.execute(merge=MERGE)
print('GROUP TIME: %.2f' % (time.time() - pt))

# Inverted index over the original grouping (vertex -> group id, presumably).
inv_index_original = groups2inv_index(groups, net.nVertices)

pt = time.time()
# Select K_SIZE landmark vertices using the configured sampling strategy.
k_set = sample(net, k=K_SIZE, method=SAMPLE_METHOD)
print('SAMPLE TIME: %.2f' % (time.time() - pt))

# Rebuild the inverted index taking the sampled set into account, then
# override the group membership of sampled vertices (pure_override_nodes —
# exact semantics defined elsewhere) and prepend the landmark set so it
# becomes group 0.
inv_index = groups2inv_index(groups, net.nVertices, k_set)
pure_override_nodes(groups, inv_index)
groups = [k_set] + groups

pt = time.time()
# Construct the optimizer; the timing label indicates its constructor
# performs the initial SVD.
model = Optimizer(net, groups, dim=DIMENSION, lam=LAMBDA, eta=ETA, max_iter=MAX_ITER,
                  sample_strategy=SAMPLE_METHOD, verbose=True)
print('INITIAL OPTIMIZER TIME (SVD): %.2f' % (time.time() - pt))

# Parallel training over 4 worker processes (body continues past this excerpt).
with Pool(processes=4) as pool:
# Exemplo n.º 2
# 0
    # Randomize vertex order before chunking into fixed-size groups.
    random.shuffle(new_interest_set)

    # Partition the shuffled vertices into consecutive chunks of MERGE[1]
    # vertices each.
    # NOTE(review): any trailing partial chunk (fewer than MERGE[1] nodes)
    # is silently discarded after the loop — confirm that dropping the tail
    # group is intended.
    groups = []
    group = []
    for node in new_interest_set:
        group.append(node)
        if len(group) >= MERGE[1]:
            groups.append(group)
            group = []

    pt = time.time()
    # Select K_SIZE landmark vertices with the configured sampling strategy.
    k_set = sample(net, k=K_SIZE, method=SAMPLE_METHOD)
    sample_time = time.time() - pt
    print('SAMPLE TIME: %.2f' % sample_time)

    # Rebuild the inverted index with the sampled set, override group
    # membership of sampled vertices, and prepend the landmark set so it
    # becomes group 0.
    inv_index = groups2inv_index(groups, net.nVertices, k_set)
    pure_override_nodes(groups, inv_index)
    groups = [k_set] + groups

    pt = time.time()
    # Construct the optimizer; the timing label indicates its constructor
    # performs the initial SVD.
    optimizer = Optimizer(net,
                          groups,
                          dim=DIMENSION,
                          lam=LAMBDA,
                          eta=ETA,
                          max_iter=MAX_ITER,
                          sample_strategy=SAMPLE_METHOD,
                          verbose=VERBOSE)
    svd_time = time.time() - pt
    print('INITIAL OPTIMIZER TIME (SVD): %.2f' % svd_time)
# Exemplo n.º 3
# 0
def process(args):
    """Run the full embedding pipeline described by the CLI arguments.

    Steps: read the graph, group vertices with Louvain, sample a landmark
    set, initialize the optimizer (the constructor performs the initial
    SVD), train each group's embedding in a process pool, and optionally
    write the embeddings to a file derived from ``args.output``.

    Args:
        args: parsed argparse namespace with at least ``input``, ``order``,
            ``merge0``/``merge1``, ``sample``, ``ksize``, ``dim``, ``lam``,
            ``eta``, ``iter``, ``workers``, ``v`` (verbosity level) and
            ``output`` (path prefix, or None to skip writing).
    """
    pt = time.time()
    # Self-loops (the diagonal) are only kept for 1st-order proximity.
    with_diag = args.order == 1
    net = Graph(args.input, typ='dir', order=args.order,
                withdiag=with_diag, verbose=(args.v > 1))
    read_time = time.time() - pt
    if args.v:
        print('READ TIME:\t%.2f' % read_time)

    pt = time.time()
    grouping_model = Louvain(net, rand=True, verbose=(args.v > 1))
    groups = grouping_model.execute(merge=(args.merge0, args.merge1))
    group_time = time.time() - pt
    if args.v:
        print('GROUP TIME:\t%.2f' % group_time)

    # Map the numeric --sample flag to a strategy name; any value outside
    # 1..4 falls back to the set-cover default (same as the original chain).
    sample_method = {
        1: 'deg_deter',
        2: 'deg_prob',
        3: 'deg^2_prob',
        4: 'uniform',
    }.get(args.sample, 'set_cover_undir')

    pt = time.time()
    k_set = sample(net, k=args.ksize, method=sample_method)
    sample_time = time.time() - pt
    if args.v:
        print('SAMPLE TIME:\t%.2f' % sample_time)

    # Override group membership of the sampled vertices, then prepend the
    # landmark set so it becomes group 0.
    inv_index = groups2inv_index(groups, net.nVertices, k_set)
    pure_override_nodes(groups, inv_index)
    groups = [k_set] + groups

    pt = time.time()
    optimizer = Optimizer(net, groups, dim=args.dim, lam=args.lam, eta=args.eta,
                          max_iter=args.iter, sample_strategy=sample_method,
                          verbose=(args.v > 1))
    svd_time = time.time() - pt
    if args.v:
        print('INITIAL OPTIMIZER TIME (SVD):\t%.2f' % svd_time)

    pt = time.time()
    # One independent optimization branch per group, to be trained in parallel.
    branches = [BranchOptimizer(optimizer, t, verbose=(args.v > 1))
                for t in range(len(groups))]
    prep_time = time.time() - pt
    if args.v:
        print('PROCESS PREPARATION TIME:\t%.2f' % prep_time)

    pt = time.time()
    with Pool(processes=args.workers) as pool:
        grouped_embeddings = pool.map(WrapTrain, branches)
    embed_time = time.time() - pt
    print('OPTIMIZING TIME:\t%.2f' % embed_time)

    if args.v:
        total_time = (read_time + group_time + sample_time +
                      svd_time + prep_time + embed_time)
        print('TOTAL TIME:\t%.2f' % total_time)

    if args.output is not None:
        _write_embeddings(args, net, groups, grouped_embeddings)


def _write_embeddings(args, net, groups, grouped_embeddings):
    """Write per-vertex embedding vectors to a hyperparameter-stamped file."""
    filename = args.output + '_' + '_'.join(
        ['ksize=%d' % args.ksize,
         'dim=%d' % args.dim,
         'lam=%.1f' % args.lam,
         'eta=%.1f' % args.eta,
         'iter=%d' % args.iter
        ])
    # Context manager guarantees the file is closed even if a write raises
    # (the original opened/closed manually and leaked the handle on error).
    with open(filename, 'w') as f:
        f.write('|V|=%d; |E|=%d; dim=%d\n' % (net.nVertices, net.nEdges, args.dim))
        for i, group in enumerate(groups):
            embeddings = grouped_embeddings[i][1]
            for j, new_vid in enumerate(group):
                # Translate the internal (renumbered) vertex id back to the
                # original id for output.
                vid = net.newVid2vid_mapping[new_vid]
                f.write('%d ' % vid)
                # embeddings appears to be a 2-D matrix whose columns are
                # vertex vectors; [:, 0] flattens the (n, 1) column —
                # TODO confirm against Optimizer's output type.
                vec = np.array(embeddings[:, j])
                f.write(' '.join(str(t) for t in vec[:, 0]))
                f.write('\n')
# Exemplo n.º 4
# 0
pt = time.time()
# Load the graph from links.txt ('\\' implies Windows-style paths).
net = Graph(DATADIR + DATASET + '\\links.txt',
            typ=TYPE,
            order=ORDER,
            withdiag=WITHDIAG,
            verbose=VERBOSE)
print('READ TIME: %.2f' % (time.time() - pt))

pt = time.time()
# Community detection via Louvain; `merge=MERGE` presumably controls how
# small communities are merged — verify against Louvain.execute.
grouping_model = Louvain(net, verbose=VERBOSE)
groups = grouping_model.execute(merge=MERGE)
print('GROUP TIME: %.2f' % (time.time() - pt))

# For every vertex, look up the size of its original group. The `t - 1`
# indexing implies groups2inv_index returns 1-based group ids here —
# TODO confirm against its implementation.
group_sizes = [len(t) for t in groups]
vid2originalGroup_index = groups2inv_index(groups, net.nVertices)
vid2originalGroupSize_index = [
    group_sizes[t - 1] for t in vid2originalGroup_index
]

pt = time.time()
# Landmark sampling; this variant additionally feeds per-vertex group sizes
# into the sampler.
k_set = sample(net,
               k=K_SIZE,
               method=SAMPLE_METHOD,
               vertex_group_sizes=vid2originalGroupSize_index)
print('SAMPLE TIME: %.2f' % (time.time() - pt))

# Rebuild the inverted index with the sampled set, override group
# membership of sampled vertices, and prepend the landmark set so it
# becomes group 0.
inv_index = groups2inv_index(groups, net.nVertices, k_set)
pure_override_nodes(groups, inv_index)
groups = [k_set] + groups