Example #1
    def setUp(self):
        # A brand new graph.
        self.new = Graph()
        # A disconnected graph.
        self.disconnected = Graph()
        self.disconnected.add_edge(1, 2)
        self.disconnected.add_edge(3, 4)
        # A complete graph.
        self.complete = Graph()
        self.complete.add_edge(1, 2, 3)
        self.complete.add_edge(2, 3, 5)
        self.complete.add_edge(3, 4, 7)
        self.complete.add_edge(4, 1, 5)
        self.complete.add_edge(1, 3, 4)
        self.complete.add_edge(2, 4, 6)
Example #2
class TestExample2(unittest.TestCase):
    home = Vertex('Home')
    about = Vertex('About')
    product = Vertex('Product')
    links = Vertex('Links')
    a = Vertex('External Site A')
    b = Vertex('External Site B')
    c = Vertex('External Site C')
    d = Vertex('External Site D')
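    # Home links both ways to About, Product and Links; the external sites only receive links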
    edges = [
        Edge(home, about, 1), Edge(about, home, 1),
        Edge(home, product, 1), Edge(product, home, 1),
        Edge(home, links, 1), Edge(links, home, 1),
        Edge(links, a, 1),
        Edge(links, b, 1),
        Edge(links, c, 1),
        Edge(links, d, 1),
    ]
    g = Graph([home, about, product, links, a, b, c, d], edges)

    def test_pagerank(self):
        pr_result = self.g.pagerank()
        self.assertAlmostEqual(pr_result[self.home], 0.92, places=2)
        self.assertAlmostEqual(pr_result[self.about], 0.41, places=2)
        self.assertAlmostEqual(pr_result[self.product], 0.41, places=2)
        self.assertAlmostEqual(pr_result[self.links], 0.41, places=2)
        self.assertAlmostEqual(pr_result[self.a], 0.22, places=2)
        self.assertAlmostEqual(pr_result[self.b], 0.22, places=2)
        self.assertAlmostEqual(pr_result[self.c], 0.22, places=2)
        self.assertAlmostEqual(pr_result[self.d], 0.22, places=2)
Example #3
def main():
    """Main entry point"""
    args = basic_parser().parse_args()
    if VERBOSE:
        print(args.instances)
    for instance in args.instances:
        graph = None
        with open(instance, 'r') as instance_file:
            graph = Graph(instance_file)
            graph.name = os.path.splitext(os.path.basename(instance))[0]
        if VERBOSE:
            print('-' * 100)
            print('File: {name}.txt'.format(name=graph.name))
        start = time.time()
        S = iterated_local_search(graph, args.max_iter, args.time_limit,
                                  args.exclude_ls)
        elapsed = time.time() - start
        if VERBOSE:
            if S is None:
                print('! NO SOLUTION FOUND: NO SATISFYING INITIAL !')
            else:
                print('O* = {o}'.format(o=IlsObjective()(graph, S, None)))
                print('All served?', S.all_served(graph.customer_number))
                print('Everything satisfied?',
                      satisfies_all_constraints(graph, S))
                print('----- PERFORMANCE -----')
                print('ILS took {some} seconds'.format(some=elapsed))
                # visualize(S)
            print('-' * 100)
        if S is not None and not args.no_sol:
            filedir = os.path.dirname(os.path.abspath(__file__))
            generate_sol(graph, S, cwd=filedir, prefix='_ils_')
    return 0
Example #4
class TestExample1(unittest.TestCase):
    a, b, c, d = [Vertex(l) for l in 'ABCD']
    edges = [
        Edge(a, b, 1),
        Edge(a, c, 1),
        Edge(c, a, 1),
        Edge(b, c, 1),
        Edge(d, c, 1),
    ]
    g = Graph([a, b, c, d], edges)

    def test_graph_connection_cache(self):
        self.assertEqual(self.g.inV[self.a], set([(self.c, 1)]))
        self.assertEqual(self.g.inV[self.b], set([(self.a, 1)]))
        self.assertEqual(self.g.inV[self.c], set([(self.a, 1), (self.b, 1), (self.d, 1)]))
        self.assertEqual(self.g.inV[self.d], set())

        self.assertEqual(self.g.outV[self.a], set([(self.b, 1), (self.c, 1)]))
        self.assertEqual(self.g.outV[self.b], set([(self.c, 1)]))
        self.assertEqual(self.g.outV[self.c], set([(self.a, 1)]))
        self.assertEqual(self.g.outV[self.d], set([(self.c, 1)]))

    def test_pagerank(self):
        pr_result = self.g.pagerank()
        self.assertAlmostEqual(pr_result[self.a], 1.49, places=2)
        self.assertAlmostEqual(pr_result[self.b], 0.78, places=2)
        self.assertAlmostEqual(pr_result[self.c], 1.58, places=2)
        self.assertAlmostEqual(pr_result[self.d], 0.15, places=2)
Example #5
def main():

    structures = pd.read_csv('../../../input/structures.csv')
    strs_gp = structures.groupby('molecule_name')

    bonds = pd.read_csv('../../../input/bonds.csv')
    bonds_gp = bonds.groupby('molecule_name')

    train = pd.merge(
        pd.read_csv('../../../dataset/train.csv'),
        pd.read_csv('../../../dataset/scalar_coupling_contributions.csv'))
    train_gp = train.groupby('molecule_name')

    list_atoms = list(set(structures['atom']))
    print(list_atoms)

    model = TripletUpdateNet(num_layer=4,
                             node_dim=512,
                             edge_dim=256,
                             triplet_dim=128)
    model.to_gpu()

    train_charges = pd.read_csv('../../../input/train_ob_charges.csv')
    train_charges_gp = train_charges.groupby('molecule_name')

    target1 = 'dsgdb9nsd_000008'

    g1 = Graph(strs_gp.get_group(target1), bonds_gp.get_group(target1),
               list_atoms, train_charges_gp.get_group(target1))
    y1 = train_gp.get_group(target1)
    out = model([g1], [y1])
    print(out)

    target2 = 'dsgdb9nsd_000010'

    g2 = Graph(strs_gp.get_group(target2), bonds_gp.get_group(target2),
               list_atoms, train_charges_gp.get_group(target2))
    y2 = train_gp.get_group(target2)
    out = model([g2], [y2])
    print(out)

    out = model([g1, g2], [y1, y2])
    print(out)
Example #6
    def test_edge_weight(self):
        g = Graph()

        g.add_edge(0, 1, 5)
        self.assertTrue(g.get_edge_weight(0, 1) == 5)

        g.increase_edge_weight(0, 1, 1)
        self.assertTrue(g.get_edge_weight(0, 1) == 6)

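        # increasing the weight of an edge that does not exist yet creates it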
        g.increase_edge_weight(0, 2, 3)
        self.assertTrue(g.get_edge_weight(0, 2) == 3)
Example #7
    def test_remove_node(self):
        g = Graph()
        g.add_edge(0, 1)
        g.add_edge(1, 2)
        g.add_edge(2, 3)

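        # removing vertex 1 should also drop its incident edges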
        g.remove_vertex(1)

        self.assertTrue((0, 1) not in g)
        self.assertTrue((1, 2) not in g)
        self.assertTrue((2, 3) in g)
Example #8
def main():

    structures = pd.read_csv('../../../input/structures.csv')
    strs_gp = structures.groupby('molecule_name')

    bonds = pd.read_csv('../../../input/bonds.csv')
    bonds_gp = bonds.groupby('molecule_name')

    train = pd.read_csv('../../../input/train2.csv')
    train_gp = train.groupby('molecule_name')

    train_charges = pd.read_csv('../../../input/train_ob_charges.csv')
    train_charges_gp = train_charges.groupby('molecule_name')

    list_atoms = list(set(structures['atom']))
    print(list_atoms)

    model = EdgeUpdateNet(num_layer=10, node_dim=512, edge_dim=512)
    model.to_gpu()

    target1 = 'dsgdb9nsd_000008'

    g1 = Graph(strs_gp.get_group(target1), bonds_gp.get_group(target1),
               list_atoms, train_charges_gp.get_group(target1))
    y1 = train_gp.get_group(target1)
    out = model([g1], [y1])
    print(out)

    target2 = 'dsgdb9nsd_000010'

    g2 = Graph(strs_gp.get_group(target2), bonds_gp.get_group(target2),
               list_atoms, train_charges_gp.get_group(target2))
    y2 = train_gp.get_group(target2)
    out = model([g2], [y2])
    print(out)

    out = model([g1, g2], [y1, y2])
    print(out)
Example #9
    def setUp(self):
        from io import StringIO
        self.graph = Graph(StringIO(SearchUtilsTests.BASIC_VRP))
        super(SearchUtilsTests, self).setUp()

        def distance(graph, solution, md):
            """Calculate overall distance"""
            del md
            s = 0
            for route in solution:
                s += sum(graph.costs[(route[i], route[i + 1])]
                         for i in range(len(route) - 1))
            return s

        self.obj = distance
Example #10
def summarize(text):
    print("Creating graph...")
    vertices = [Vertex(s) for s in text_to_sentences(text)]
    edges = []

    for sentence_a, sentence_b in combinations(vertices, 2):
        score = similarity(sentence_a.data, sentence_b.data)
        edges.append(Edge(sentence_a, sentence_b, score))
        edges.append(Edge(sentence_b, sentence_a, score))

    # drop the memoized entries
    sentence_to_words.cache_clear()

    g = Graph(vertices, edges)
    print("Graph initialized: |V|=%d, |E|=%d" % (len(vertices), len(edges)))

    print("Running pagerank...")
    return g.sort_pagerank()
Example #11
    def test_add_edge(self):
        g = Graph()
        g.add_edge(0, 1)

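        # edges are directed, so the reverse pair is absent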
        self.assertTrue((0, 1) in g)
        self.assertTrue((1, 0) not in g)
Example #12
import socket
from lib.graph import Graph, GraphEncoder, GraphDecoder
from json import loads, dumps
from bsonrpc import JSONRpc
from bsonrpc.exceptions import FramingError
from bsonrpc.framing import (JSONFramingNetstring, JSONFramingNone,
                             JSONFramingRFC7464)

# Cut-the-corners TCP Client:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('localhost', 50001))

rpc = JSONRpc(s, framing_cls=JSONFramingNone)
server = rpc.get_peer_proxy()

graph = Graph()
graph.add_node("leaf1")
graph.add_node("leaf2")
graph.add_node(
    "root", [graph.nodes["leaf1"], graph.nodes["leaf2"], graph.nodes["leaf1"]])

encoder = GraphEncoder()

# Execute in server:
result = loads(server.increment(encoder.default(graph)), cls=GraphDecoder)
result.nodes["root"].show()

result = loads(server.increment(encoder.default(result)), cls=GraphDecoder)
result.nodes["root"].show()

rpc.close()  # Closes the socket 's' also
Example #13
import numpy as np
import tensorflow as tf
from tensorflow.keras.layers import Input
from lib.gcn import GraphConv, TemporalConv
from lib.graph import Graph

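# assumption: "sbu" names the skeleton layout and "spatial" the ST-GCN partition strategy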
skeleton = Graph("sbu", "spatial")

input_features = Input([30, skeleton.num_node, 3], dtype="float32")
x = tf.keras.layers.Conv2D(64 * 3, (3, 1), padding="same")(input_features)
input_A = Input(tensor=tf.keras.backend.constant(skeleton.A))
x, A = GraphConv(64, t_kernels=3)([x, input_A])
x = TemporalConv(64, dropout=0.5)(x)
x, A = GraphConv(128, t_kernels=3)([x, A])
x = TemporalConv(128, dropout=0.5)(x)
print(x.shape)
Example #14
def graph():
    return Graph()
Example #15
        else:
            for v in graph.adj[u]:
                if v not in visited:
                    visited.append(v)
                    queue.append(v)

    return bfs_len, visited

if __name__ == '__main__':

    filename = "../tests/DiameterGraph.txt"
    with open(filename) as fd:
        V = int(fd.readline())
        E = int(fd.readline())

        g = Graph(V)
        for _ in range(E):
            u, v = map(int, fd.readline().split())
            g.add_edge(u, v)

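        # BFS from every vertex; the deepest traversal found is the diameter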
        maxd = 0
        for u in g.adj:
            d, vertices = find_diameter(g, u)
            maxd = max(d, maxd)

        print "Max diameter {} vertices {}".format(maxd, vertices)
Example #16
>>> end = "Penzance"
>>> cities.weight(cities.shortest_path(start, end))
284
>>> cities.weighted_distance(start, end)
284

Although all cities are directly connected to each other,
the shortest path is not necessarily the direct path.

>>> cities.shortest_path(start, end)
['Luton', 'Nuneaton', 'Penzance']
"""

from lib.graph import Graph

cities = Graph()
cities.add_edge("Scunthorpe", "Bridlington", 31)
cities.add_edge("Scunthorpe", "Wick", 514)
cities.add_edge("Scunthorpe", "Bognor", 252)
cities.add_edge("Scunthorpe", "Nuneaton", 111)
cities.add_edge("Scunthorpe", "Luton", 117)
cities.add_edge("Scunthorpe", "Wrexham", 142)
cities.add_edge("Scunthorpe", "Penzance", 318)
cities.add_edge("Bridlington", "Luton", 142)
cities.add_edge("Bridlington", "Nuneaton", 115)
cities.add_edge("Bridlington", "Bognor", 209)
cities.add_edge("Bridlington", "Penzance", 426)
cities.add_edge("Bridlington", "Wrexham", 162)
cities.add_edge("Bridlington", "Wick", 512)
cities.add_edge("Luton", "Wick", 627)
cities.add_edge("Luton", "Bognor", 112)
Example #17
from lib.graph import Graph

# Predict expected return for each time step
agent = Agent(STATES, ACTIONS, REWARDS, HORIZON, DISCOUNT_FACTOR, POLICY,
              TRANSITIONS)
expected_return_under_policy = agent.expected_return_under_policy()

# Get expected rewards at end of episode
expected_rewards = {}
for state in STATES:
    expected_rewards[state] = round(expected_return_under_policy[state,
                                                                 HORIZON - 1])

# Simulate an episode to get actual rewards
actual_rewards = {}
for state in STATES:
    episode = Episode(ACTIONS, STATES, REWARDS, HORIZON, POLICY, TRANSITIONS,
                      state)
    actual_rewards[state] = 0
    for _ in range(EPISODES):
        actual_rewards[state] += episode.play()
    actual_rewards[state] /= EPISODES

# Compare predicted vs. actual rewards
print('Expected rewards:', expected_rewards)
print('Actual rewards:  ', actual_rewards)

# Draw state-action diagram
graph = Graph(STATES, ACTIONS, LABELS)
graph.draw()
Example #18
    def get_graph(self, addr):
        from capstone import CS_OP_IMM, CS_ARCH_MIPS

        ARCH_UTILS = self.load_arch_module().utils

        curr = self.lazy_disasm(addr)
        if curr is None:
            return None

        gph = Graph(self, addr)
        rest = []
        start = time.perf_counter()
        prefetch = None

        # WARNING: this assumes that on every architecture the jump
        # address is the last operand (operands[-1])
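        # (e.g. for a direct "jmp 0x1234", capstone reports the immediate
        # target 0x1234 as operands[-1] with type CS_OP_IMM)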

        while 1:
            if not gph.exists(curr):
                if self.arch == CS_ARCH_MIPS:
                    prefetch = self.__prefetch_inst(curr)

                if ARCH_UTILS.is_uncond_jump(curr) and len(curr.operands) > 0:
                    if curr.operands[-1].type == CS_OP_IMM:
                        addr = curr.operands[-1].value.imm
                        nxt = self.lazy_disasm(addr)
                        gph.set_next(curr, nxt, prefetch)
                        rest.append(nxt.address)
                    else:
                        # Can't interpret jmp ADDR|reg
                        gph.add_node(curr, prefetch)
                    gph.uncond_jumps_set.add(curr.address)

                elif ARCH_UTILS.is_cond_jump(curr) and len(curr.operands) > 0:
                    if curr.operands[-1].type == CS_OP_IMM:
                        nxt_jump = self.lazy_disasm(curr.operands[-1].value.imm)

                        if self.arch == CS_ARCH_MIPS:
                            direct_nxt = \
                                self.lazy_disasm(prefetch.address + prefetch.size)
                        else:
                            direct_nxt = \
                                self.lazy_disasm(curr.address + curr.size)

                        gph.set_cond_next(curr, nxt_jump, direct_nxt, prefetch)
                        rest.append(nxt_jump.address)
                        rest.append(direct_nxt.address)
                    else:
                        # Can't interpret jmp ADDR|reg
                        gph.add_node(curr, prefetch)
                    gph.cond_jumps_set.add(curr.address)

                elif ARCH_UTILS.is_ret(curr):
                    gph.add_node(curr, prefetch)

                else:
                    try:
                        nxt = self.lazy_disasm(curr.address + curr.size)
                        gph.set_next(curr, nxt)
                        rest.append(nxt.address)
                    except:
                        gph.add_node(curr)

            try:
                curr = self.lazy_disasm(rest.pop())
            except IndexError:
                break

        if self.binary.type == T_BIN_PE:
            self.binary.pe_reverse_stripped_symbols(self)

        elapsed = time.perf_counter() - start
        debug__("Graph built in %fs" % elapsed)

        return gph
Example #19
    def get_graph(self, entry_addr):
        from capstone import CS_OP_IMM, CS_ARCH_MIPS

        ARCH_UTILS = self.load_arch_module().utils

        gph = Graph(self, entry_addr)
        stack = [entry_addr]
        start = time()
        prefetch = None

        # WARNING: this assumes that on every architecture the jump
        # address is the last operand (operands[-1])
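        # (e.g. a direct "jmp IMM" exposes its target as operands[-1])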

        # Here each instruction is a node. Blocks will be created in the
        # function __simplify.

        while stack:
            ad = stack.pop()
            inst = self.lazy_disasm(ad)

            if inst is None:
                # Remove all previous instructions which have a link
                # to this instruction.
                if ad in gph.link_in:
                    for i in gph.link_in[ad]:
                        gph.link_out[i].remove(ad)
                    for i in gph.link_in[ad]:
                        if not gph.link_out[i]:
                            del gph.link_out[i]
                    del gph.link_in[ad]
                continue

            if gph.exists(inst):
                continue

            if ARCH_UTILS.is_ret(inst):
                if self.arch == CS_ARCH_MIPS:
                    prefetch = self.__prefetch_inst(inst)
                gph.new_node(inst, prefetch, None)

            elif ARCH_UTILS.is_uncond_jump(inst):
                if self.arch == CS_ARCH_MIPS:
                    prefetch = self.__prefetch_inst(inst)
                gph.uncond_jumps_set.add(ad)
                op = inst.operands[-1]
                if op.type == CS_OP_IMM:
                    nxt = op.value.imm
                    stack.append(nxt)
                    gph.new_node(inst, prefetch, [nxt])
                else:
                    if inst.address in self.jmptables:
                        table = self.jmptables[inst.address].table
                        stack += table
                        gph.new_node(inst, prefetch, table)
                    else:
                        # Can't interpret jmp ADDR|reg
                        gph.new_node(inst, prefetch, None)

            elif ARCH_UTILS.is_cond_jump(inst):
                if self.arch == CS_ARCH_MIPS:
                    prefetch = self.__prefetch_inst(inst)
                gph.cond_jumps_set.add(ad)
                op = inst.operands[-1]
                if op.type == CS_OP_IMM:
                    if self.arch == CS_ARCH_MIPS:
                        direct_nxt = prefetch.address + prefetch.size
                    else:
                        direct_nxt = inst.address + inst.size

                    nxt_jmp = op.value.imm

                    stack.append(direct_nxt)
                    stack.append(nxt_jmp)
                    gph.new_node(inst, prefetch, [direct_nxt, nxt_jmp])
                else:
                    # Can't interpret jmp ADDR|reg
                    gph.new_node(inst, prefetch, None)

            else:
                nxt = inst.address + inst.size
                stack.append(nxt)
                gph.new_node(inst, None, [nxt])

        if len(gph.nodes) == 0:
            return None, 0

        if self.binary.type == T_BIN_PE:
            nb_new_syms = self.binary.pe_reverse_stripped_symbols(self)
        else:
            nb_new_syms = 0

        elapsed = time() - start
        debug__("Graph built in %fs (%d instructions)" %
                (elapsed, len(gph.nodes)))

        return gph, nb_new_syms
Example #20
]

bands = [
    Band(id=1,
         cost=1,
         frequency_range_from=1,
         frequency_range_to=384,
         loss_per_km=0.046),
    Band(id=2,
         cost=2,
         frequency_range_from=385,
         frequency_range_to=768,
         loss_per_km=0.055),
]
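# the two bands split frequencies 1-768 in half; the upper band costs more and is lossier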

graph = Graph(Graphical(data_xml_root))
demands = read_demands(data_xml_root, graph)

problem = Problem(graph, demands, transponders, bands)

gen_alg = GeneticAlgorithm(problem, 2000, 5000, 0.01)
gen_alg.init_population()

period_of_save_result = 10
i = 1
while True:
    gen_alg.generate_new_population()

    print("Epoch nr", i)
    print("Best result:")
    print("   Cost = ", gen_alg.P[0].get_cost())
Example #21
def main():

    random.seed(params.seed)
    np.random.seed(params.seed)

    tic = time.time()
    logger = init_logger('_log/log_b{:d}_l{:d}_nd{:d}_seed{:d}'.format(
        params.batch_size, params.num_layer, params.node_dim, params.seed))

    logger.info('parameters')
    logger.info(vars(params))

    train, valid, test, train_moles, valid_moles = load_dataset(params.seed)

    train_moles = sorted(train_moles)
    valid_moles = sorted(valid_moles)
    valid.sort_values('molecule_name', inplace=True)

    logger.info('train moles: {} ...'.format(train_moles[:5]))
    logger.info('valid moles: {} ...'.format(valid_moles[:5]))

    test_moles = sorted(list(set(test['molecule_name'])))
    test.sort_values('molecule_name', inplace=True)

    logger.info('train data: {}'.format(train.shape))
    logger.info('valid data: {}'.format(valid.shape))
    logger.info('test data: {}'.format(test.shape))

    structures = pd.read_csv('../../input/structures.csv')
    structures_groups = structures.groupby('molecule_name')

    bonds = pd.read_csv('../../input/bonds.csv')
    bonds_gp = bonds.groupby('molecule_name')

    train_charges = pd.read_csv('../../input/train_ob_charges.csv')
    train_charges_gp = train_charges.groupby('molecule_name')

    test_charges = pd.read_csv('../../input/test_ob_charges.csv')
    test_charges_gp = test_charges.groupby('molecule_name')

    train_targets = train.groupby('molecule_name')
    valid_targets = valid.groupby('molecule_name')
    test_targets = test.groupby('molecule_name')

    if params.debug:
        random.shuffle(train_moles)
        train_moles = train_moles[:5000]
        test_moles = test_moles[:1000]

    valid.sort_values('id', inplace=True)
    test.sort_values('id', inplace=True)

    list_atoms = list(set(structures['atom']))

    train_graphs = dict()
    for mole in tqdm(train_moles):
        train_graphs[mole] = Graph(structures_groups.get_group(mole),
                                   bonds_gp.get_group(mole), list_atoms,
                                   train_charges_gp.get_group(mole))

    valid_graphs = dict()
    for mole in tqdm(valid_moles):
        valid_graphs[mole] = Graph(structures_groups.get_group(mole),
                                   bonds_gp.get_group(mole), list_atoms,
                                   train_charges_gp.get_group(mole))

    test_graphs = dict()
    for mole in tqdm(test_moles):
        test_graphs[mole] = Graph(structures_groups.get_group(mole),
                                  bonds_gp.get_group(mole), list_atoms,
                                  test_charges_gp.get_group(mole))

    model = EdgeUpdateNet(num_layer=params.num_layer,
                          node_dim=params.node_dim,
                          edge_dim=params.edge_dim,
                          gpu=params.gpu)
    if params.gpu >= 0:
        logger.info('transfer model to GPU {}'.format(params.gpu))
        model.to_gpu(params.gpu)

    optimizer = optimizers.Adam(alpha=5e-4)
    optimizer.setup(model)
    model.cleargrads()

    epoch = 2 if params.debug else params.epoch

    for ep in range(epoch):

        logger.info('')
        logger.info('')
        logger.info('start epoch {}'.format(ep))
        logger.info('')

        # -------------------------
        logger.info('')
        logger.info('training')

        loss_value = 0
        random.shuffle(train_moles)
        train_batches_moles = generate_batches(structures_groups, train_moles,
                                               params.batch_size)
        random.shuffle(train_batches_moles)

        for batch_moles in tqdm(train_batches_moles):

            list_train_X = list()
            list_train_y = list()

            for target_mol in batch_moles:
                list_train_X.append(train_graphs[target_mol])
                list_train_y.append(train_targets.get_group(target_mol))

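            # note: the 'train' config is enabled only on the first epoch (ep == 0)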
            with chainer.using_config('train', ep == 0):

                loss = model(list_train_X, list_train_y)

                model.cleargrads()
                loss.backward()
                optimizer.update()

            loss_value += cuda.to_cpu(loss.data)

        logger.info('train loss: {:.3f}'.format(
            float(loss_value) / len(train_moles)))

        # -------------------------
        logger.info('')
        logger.info('validation')

        valid_df = predicts(structures_groups, valid_moles, valid_graphs,
                            valid_targets, model, params.batch_size)

        valid_pred = valid_df[['fc', 'sd', 'pso', 'dso']]

        valid_score = calc_score(valid, valid_pred.values)
        logger.info('valid score: {:.3f}'.format(valid_score))

        # -------------------------

        optimizer.alpha = optimizer.alpha * 0.95

        logger.info('change learning rate: {:.6f}'.format(optimizer.alpha))

        if (ep + 1) % 20 == 0:

            # -------------------------
            # save model

            dir_model = Path('_model')
            logger.info('save model')
            dir_model.mkdir(exist_ok=True)
            serializers.save_npz(
                dir_model / 'model_ep{}_seed{}.npz'.format(ep, params.seed),
                model)

            # -------------------------
            # make submission

            logger.info('')
            logger.info('test')

            test_df = predicts(structures_groups, test_moles, test_graphs,
                               test_targets, model, params.batch_size)
            make_submission(test_df,
                            ep,
                            valid_score,
                            params.seed,
                            dir_sub=Path('_submission'))
            make_submission(valid_df,
                            ep,
                            valid_score,
                            params.seed,
                            dir_sub=Path('_valid'))

    toc = time.time() - tic
    logger.info('Elapsed time {:.1f} [min]'.format(toc / 60))