Example #1
    def __init__(self, instance, minibuckets, dimensions, search_method):
        self.instance = instance
        self.minibuckets = minibuckets
        self.dimensions = dimensions
        self.search_method = search_method
        if self.dimensions == 1:
            self.path = os.path.join(MONO_DIR, instance)
        else:
            self.path = os.path.join(BI_DIR, instance)

        self.graph, self.original_graph = read_graph(self.path)

        self.order = get_variables_order(self.graph)
        self.original_order = get_variables_order(self.original_graph)
        self.heuristic_solver = MiniBucket(self.order,
                                           self.original_order,
                                           self.minibuckets,
                                           vertex_cover_cost,
                                           debug=False)
        self.heuristic_solver.build_buckets()
        if search_method == "bb":
            self.search_solver = BranchAndBound(self.heuristic_solver,
                                                len(self.graph))
        elif search_method == "nsga2":
            self.search_solver = NSGA2(self.order, self.heuristic_solver)
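Only the constructor is shown above; a minimal usage sketch, assuming the enclosing class is named Experiment (a hypothetical name) and that MONO_DIR / BI_DIR are module-level constants. The run() call mirrors ga.run() in the genetic() example below.

# Hypothetical usage of the class whose __init__ is shown above.
exp = Experiment(instance="n40_ep0.5_d2",   # instance name is illustrative
                 minibuckets=5,             # max minibucket variables, illustrative
                 dimensions=2,              # 2 selects the bi-objective directory
                 search_method="nsga2")
exp.search_solver.run()                     # NSGA2.run(), as in genetic() below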
Example #2
def objective(trial):
    test_graphs = [read_graph(
        "data/random/1000_2500_{}".format(i)).adj for i in range(5)]

    layer_num = trial.suggest_int('layer_num', 2, 20)
    feature = trial.suggest_int('feature', 5, 10)
    beta = trial.suggest_uniform('beta', 0.99, 0.997)

    gnn = GIN3(layer_num=layer_num, feature=feature)
    gnn.to(device)
    trainer = MCTSTrainer(gnn, test_graphs, "optuna_tmp_100_tran2_{}_{}_{}".format(
        layer_num, feature, beta))

    Timer.start('all')
    ans = []
    for i in range(500):
        print("epoch: ", i)
        graph = generate_random_graph(100, 250).adj
        trainer.train2(graph, 10 * beta ** i)
        trainer.test()
    Timer.end('all')
    score = 0
    coef = 1
    for all_rewards in reversed(trainer.test_result):
        coef *= 0.9
        for rewards in all_rewards:
            score += 10 * coef * np.max(rewards)
            score += coef * np.mean(rewards)

    Timer.print()
    trainer.save_model()
    trainer.save_test_result()

    return -score
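A minimal driver sketch for this objective using the standard Optuna API; the trial count is an assumption.

import optuna

# objective() returns -score, so Optuna's default minimization direction is what we want.
study = optuna.create_study()
study.optimize(objective, n_trials=50)   # n_trials chosen for illustration
print("best params:", study.best_params)
print("best value:", study.best_value)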
Example #3
def test_graph():
    flights = [("ORD", "SEA"), ("ORD", "LAX"), ('ORD', 'DFW'), ('ORD', 'PIT'),
        ('SEA', 'LAX'), ('LAX', 'DFW'), ('ATL', 'PIT'), ('ATL', 'RDU'),
        ('RDU', 'PHL'), ('PIT', 'PHL'), ('PHL', 'PVD')]

    G = {}
    for (x, y) in flights:
        graph.make_link(G, x, y)
    marvel_G = graph.read_graph("resources/marvel_graph.tsv")

    assert graph.clustering_coefficient(G) == 2.0 / 9.0
    assert len(marvel_G) == 19255
    assert graph.path(marvel_G, 'A', 'ZZZAX') == ['A', 'W2 159', 'WOLVERINE/LOGAN ', 'W2 41', 'SUMMERS, NATHAN CHRI', 'C2 59', 'ZZZAX']
    assert 5.11 > graph.centrality(marvel_G, 'A') > 5.1
Example #4
def genetic():
    start_time = time.perf_counter()

    # input_file = 'inputs/test.txt'
    # input_file = 'inputs/test2.txt'
    # input_file = 'inputs/test3.txt'
    # input_file = 'inputs/slide.txt'
    # input_file = 'inputs/graph2.txt'
    # input_file = 'inputs/medium_graph.txt'
    # input_file = 'inputs/big_graph.txt'
    # input_file = 'instances/mono-objective/n10_ep0.2_d1'
    # input_file = 'instances/bi-objective/n30_ep0.2_d2'
    # input_file = 'instances/bi-objective/n30_ep0.5_d2'
    # input_file = 'instances/bi-objective/n30_ep0.8_d2'
    # input_file = 'instances/bi-objective/n40_ep0.2_d2'
    input_file = 'instances/bi-objective/n40_ep0.5_d2'
    # input_file = 'instances/bi-objective/n40_ep0.8_d2'
    # input_file = 'instances/bi-objective/n50_ep0.5_d2'
    # input_file = 'instances/bi-objective/n60_ep0.2_d2'
    # input_file = 'instances/bi-objective/n100_ep0.2_d2'
    # input_file = 'instances/bi-objective/n100_ep0.5_d2'
    # input_file = 'instances/bi-objective/n100_ep0.8_d2'

    graph, original_graph = read_graph(input_file)
    input_name = os.path.basename(input_file) + '_{}'.format(
        MAX_MINIBUCKET_VARIABLES)

    order = get_variables_order(graph)
    original_order = get_variables_order(original_graph)
    solver = MiniBucket(order,
                        original_order,
                        MAX_MINIBUCKET_VARIABLES,
                        vertex_cover_cost,
                        debug=False)
    solver.build_buckets()

    print('Finished building minibuckets ({} max vars) in {:.3f}s\n'.format(
        MAX_MINIBUCKET_VARIABLES,
        time.perf_counter() - start_time))

    try:
        ga = NSGA2(order, solver)
        ga.run()
    except KeyboardInterrupt:
        pass

    print('Finished GA: {:.3f}s'.format(time.perf_counter() - start_time))
Example #5
def main():
    start_time = time.perf_counter()
    # input_file = 'inputs/test.txt'
    input_file = 'inputs/test2.txt'
    # input_file = 'inputs/test3.txt'
    # input_file = 'instances/mono-objective/n10_ep0.2_d1'
    # input_file = 'instances/bi-objective/n30_ep0.2_d2'
    # input_file = 'instances/bi-objective/n50_ep0.5_d2'
    # input_file = 'instances/bi-objective/n100_ep0.2_d2'
    # input_file = 'instances/bi-objective/n100_ep0.5_d2'
    # input_file = 'instances/bi-objective/n100_ep0.8_d2'
    # input_file = 'inputs/slide.txt'
    # input_file = 'inputs/graph2.txt'
    # input_file = 'inputs/medium_graph.txt'
    # input_file = 'inputs/big_graph.txt'
    graph, original_graph = read_graph(input_file)
    input_name = os.path.basename(input_file) + '_{}'.format(
        MAX_MINIBUCKET_VARIABLES)

    print('Graph:')
    print(graph)
    print()

    # order = get_variables_order(graph, heuristic=None)
    # order = get_variables_order(graph, heuristic='custom', custom_order='ADBECF')
    order = get_variables_order(graph)
    original_order = get_variables_order(original_graph)

    print('Order:')
    print(order)
    print()

    solver = MiniBucket(order,
                        original_order,
                        MAX_MINIBUCKET_VARIABLES,
                        vertex_cover_cost,
                        debug=False)
    solver.build_buckets()

    print('Finished building minibuckets ({} max vars) in {:.3f}s\n'.format(
        MAX_MINIBUCKET_VARIABLES,
        time.perf_counter() - start_time))

    assignment = [1, 0, 1, 1]
    cost, best_next_assignment = solver.compute_cost(assignment)
    new_next = solver.get_best_next(assignment)
    print(cost, best_next_assignment, new_next)
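    # Hypothetical extension (not in the original): score every assignment of
    # the same length as the hand-picked one with the same solver, reusing the
    # compute_cost() call demonstrated above, to see how they compare.
    import itertools
    for candidate in itertools.product([0, 1], repeat=len(assignment)):
        candidate_cost, _ = solver.compute_cost(list(candidate))
        print(candidate, candidate_cost)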
Example #6
def test_graph():
    flights = [("ORD", "SEA"), ("ORD", "LAX"), ('ORD', 'DFW'), ('ORD', 'PIT'),
               ('SEA', 'LAX'), ('LAX', 'DFW'), ('ATL', 'PIT'), ('ATL', 'RDU'),
               ('RDU', 'PHL'), ('PIT', 'PHL'), ('PHL', 'PVD')]

    G = {}
    for (x, y) in flights:
        graph.make_link(G, x, y)
    marvel_G = graph.read_graph("resources/marvel_graph.tsv")

    assert graph.clustering_coefficient(G) == 2.0 / 9.0
    assert len(marvel_G) == 19255
    assert graph.path(marvel_G, 'A', 'ZZZAX') == [
        'A', 'W2 159', 'WOLVERINE/LOGAN ', 'W2 41', 'SUMMERS, NATHAN CHRI',
        'C2 59', 'ZZZAX'
    ]
    assert 5.11 > graph.centrality(marvel_G, 'A') > 5.1
Example #7
def train(idx):
    np.random.seed()
    torch.manual_seed(idx)
    test_graphs = [
        read_graph("data/random/{}_{}".format(test_graph, i)).adj
        for i in range(5)
    ]

    gnn = GIN3(layer_num=layer_num, feature=feature)
    gnn.to(device)
    trainer = MCTSTrainer(gnn, test_graphs, "{}_{}th".format(file_prefix, idx))

    Timer.start('all')

    for i in range(epoch):
        print("epoch: ", i)
        graph = generate_random_graph(node, edge).adj
        Timer.start('test')
        trainer.test()
        Timer.end('test')

        Timer.start('train')
        tmp = 0.01**(1 / epoch)
        # 10 * tmp^epoch ~= 0.1
        if train_method == "train1":
            trainer.train1(graph, 10 * tmp**i, iter_p=iter_p)
        elif train_method == "train2":
            trainer.train2(graph, 10 * tmp**i, iter_p=iter_p)
        else:
            print("no such method")
            assert False
        Timer.end('train')

    Timer.start('test')
    trainer.test()
    Timer.end('test')

    Timer.end('all')
    Timer.print()
    Counter.print()

    trainer.save_model()
    trainer.save_test_result()
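train() relies on module-level settings (test_graph, layer_num, feature, epoch, node, edge, train_method, iter_p, file_prefix) defined elsewhere in the script. A hypothetical driver that runs a few independently seeded trainings in parallel, mirroring the Pool usage in Example #9:

from multiprocessing import Pool

if __name__ == "__main__":
    # Worker count and number of seeds are illustrative, not from the source.
    with Pool(4) as pool:
        pool.map(train, range(4))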
Example #8
from config import device
import numpy as np
import torch
from utils.graph import read_graph, generate_random_graph
from mcts.mcts import MCTS
from mcts.mcts_trainer import MCTSTrainer
from gin.gin import GIN3
from utils.timer import Timer
from utils.counter import Counter

if __name__ == "__main__":
    test_graphs = [
        read_graph("data/random/100_250_{}".format(i)).adj for i in range(5)
    ]

    gnn = GIN3(layer_num=6)
    gnn.to(device)
    trainer = MCTSTrainer(gnn, test_graphs, "train2_p2_0th")

    Timer.start('all')

    for i in range(100):
        print("epoch: ", i)
        graph = generate_random_graph(100, 250).adj
        Timer.start('train')
        trainer.train2(graph, 10 * 0.96**i, iter_p=2)
        Timer.end('train')

        Timer.start('test')
        trainer.test()
        Timer.end('test')
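    # The snippet above appears to be cut off at this point; Examples #2 and #7
    # finish by stopping the overall timer and saving, so a plausible
    # continuation (an assumption, not recovered from the source) is:
    Timer.end('all')
    Timer.print()

    trainer.save_model()
    trainer.save_test_result()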
Example #9
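    # Fragment: the lines below appear to come from the body of use_model(),
    # which __main__ maps over (gnn, filename, graph) tuples; mcts, graph and
    # name are assumed to be set up earlier in that function.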
    Timer.start('all')

    result = mcts.search_for_exp(graph, time_limit=10 * 60, min_iter_num=100)
    print("graph: {}, result: {}".format(name, result))
    print("max: ", max(result))

    Timer.end('all')
    Timer.print()

    return max(result)


if __name__ == "__main__":
    gnns = best_gins()

    filename = "random/10_25_0"
    graph = read_graph("data/" + filename).adj
    print(filename)

    results = {}
    pool = Pool()
    results = pool.map(use_model, [(gnn, filename, graph) for gnn in gnns])
    pool.close()
    pool.join()
    print(results)

    print("file name: {} final max: {}".format(filename, max(results)))

    # for gnn in gnns:
    #     use_model(gnn, filename, graph)