# Example #1
    def test_create_more_nodes(self):
        """Chain two VariableNodes in an auto search space, then build and plot the model."""
        from deephyper.search.nas.model.space import AutoKSearchSpace
        from deephyper.search.nas.model.space.node import VariableNode
        from deephyper.search.nas.model.space.op.op1d import Dense

        space = AutoKSearchSpace((5, ), (1, ), regression=True)

        first = VariableNode()
        space.connect(space.input_nodes[0], first)
        first.add_op(Dense(10))

        second = VariableNode()
        second.add_op(Dense(10))
        space.connect(first, second)

        # Select op index 0 on both variable nodes.
        space.set_ops([0, 0])

        falias = 'test_auto_keras_search_spaceure'
        space.draw_graphviz(f'{falias}.dot')

        model = space.create_model()
        from tensorflow.keras.utils import plot_model
        plot_model(model, to_file=f'{falias}.png', show_shapes=True)
def create_dense_cell(input_nodes):
    """MLP type 2.

    Args:
        input_nodes (list(Node)): possible inputs of the current cell.

    Returns:
        Cell: a Cell instance.
    """
    cell = Cell(input_nodes)

    # One variable node holding four independent Dense(10, relu) candidates.
    dense_node = VariableNode(name='N')
    for _ in range(4):
        dense_node.add_op(Dense(10, tf.nn.relu))
    cell.graph.add_edge(input_nodes[0], dense_node)

    # Wrap the node in a single block.
    blk = Block()
    blk.add_node(dense_node)
    cell.add_block(blk)

    cell.set_outputs()
    return cell
 def create_conv_node(name):
     """Return a node offering identity, conv, pooling and dense candidate ops."""
     node = Node(name)
     node.add_op(Identity())
     # Convolutions with growing receptive fields.
     for fsize in (3, 5, 10):
         node.add_op(Conv1D(filter_size=fsize, num_filters=16))
     # Max-pooling alternatives over the same window sizes.
     for psize in (3, 5, 10):
         node.add_op(MaxPooling1D(pool_size=psize, padding='same'))
     # Fully-connected alternatives.
     node.add_op(Dense(10, tf.nn.relu))
     node.add_op(Dense(20, tf.nn.relu))
     return node
# Example #4
def test_mirror_node():
    """A MirrorNode must resolve to the same op chosen on the node it mirrors."""
    main = VariableNode()
    first_op = Dense(10)
    main.add_op(first_op)
    main.add_op(Dense(20))

    mirror = MirrorNode(main)

    # Choosing on the mirrored node drives both.
    main.set_op(0)

    assert main.op == first_op
    assert mirror.op == first_op
# Example #5
def create_search_space_old(
        input_shape=(2, ), output_shape=(5, ), *args, **kwargs):
    """Stack three Dense(10, relu) constant layers followed by a Dense(5) head."""
    ss = AutoKSearchSpace(input_shape, output_shape, regression=True)

    tail = ss.input_nodes[0]
    for _ in range(3):
        hidden = ConstantNode(Dense(10, "relu"))
        ss.connect(tail, hidden)
        tail = hidden

    head = ConstantNode(Dense(5))
    ss.connect(tail, head)
    return ss
# Example #6
def test_mime_node():
    """A MimeNode must select its own op at the same index as the mimed node."""
    leader = VariableNode()
    leader_op = Dense(10)
    leader.add_op(leader_op)
    leader.add_op(Dense(20))

    follower = MimeNode(leader)
    follower_op = Dense(30)
    follower.add_op(follower_op)
    follower.add_op(Dense(40))

    # Index 0 on the leader selects index 0 on the follower too.
    leader.set_op(0)

    assert leader.op == leader_op
    assert follower.op == follower_op
def add_dense_to_(node):
    """Register Identity plus Dense candidates (16..96 units x 4 activations)."""
    node.add_op(Identity())  # identity: option to add no layer at all

    for units in (16, 32, 48, 64, 80, 96):
        for activation in (None, tf.nn.relu, tf.nn.tanh, tf.nn.sigmoid):
            node.add_op(Dense(units=units, activation=activation))
def create_search_space(
        input_shape=(100, ), output_shape=[(1),
                                           (100, )], num_layers=5, **kwargs):
    """Build a search space made of an auto-encoder plus a regressor branch.

    The encoder/decoder follows the ``units`` schedule below; the node at the
    narrowest point (the latent space) also feeds ``num_layers`` dense layers
    ending in the scalar ``output_0`` head, while the decoder ends in the
    100-unit ``output_1`` head.

    NOTE(review): the first default output shape is written ``(1)``, which is
    the int 1 and not the 1-tuple ``(1, )`` — confirm what KSearchSpace
    expects here.
    """
    struct = KSearchSpace(input_shape, output_shape)

    inp = struct.input_nodes[0]

    # auto-encoder
    # Symmetric bottleneck schedule: 128 -> ... -> 8 -> ... -> 128.
    units = [128, 64, 32, 16, 8, 16, 32, 64, 128]
    # units = [32, 16, 32]
    prev_node = inp
    # d tracks the direction of the schedule: 1 while contracting, -1 once
    # the bottleneck has been passed and the widths start growing again.
    d = 1
    for i in range(len(units)):
        vnode = VariableNode()
        vnode.add_op(Identity())
        if d == 1 and units[i] < units[i + 1]:
            # Bottleneck reached: offer widths 2..units[i] (step 2) and
            # remember this node as the latent space.
            d = -1
            # print(min(1, units[i]), ' - ', max(1, units[i])+1)
            for u in range(min(2, units[i]), max(2, units[i]) + 1, 2):
                vnode.add_op(Dense(u, tf.nn.relu))
            latente_space = vnode
        else:
            # Offer widths between this layer's units and its neighbor's
            # (direction-dependent), step 2.
            # print(min(units[i], units[i+d]), ' - ', max(units[i], units[i+d])+1)
            for u in range(min(units[i], units[i + d]),
                           max(units[i], units[i + d]) + 1, 2):
                vnode.add_op(Dense(u, tf.nn.relu))
        struct.connect(prev_node, vnode)
        prev_node = vnode

    # Decoder output head (second output of the search space).
    out2 = ConstantNode(op=Dense(100, name="output_1"))
    struct.connect(prev_node, out2)

    # regressor
    # Branch off the latent space. NOTE(review): `latente_space` is only
    # bound when the bottleneck branch above runs; with a monotonic `units`
    # schedule this would raise NameError.
    prev_node = latente_space
    # prev_node = inp
    for _ in range(num_layers):
        vnode = VariableNode()
        for i in range(16, 129, 16):
            vnode.add_op(Dense(i, tf.nn.relu))

        struct.connect(prev_node, vnode)
        prev_node = vnode

    # Scalar regression head (first output of the search space).
    out1 = ConstantNode(op=Dense(1, name="output_0"))
    struct.connect(prev_node, out1)

    return struct
def add_dense_op_(node):
    """Register Identity plus Dense candidates of increasing width on *node*."""
    node.add_op(Identity())
    for units in (10, 50, 100, 200, 250, 500, 750, 1000):
        node.add_op(Dense(units=units))
# Example #10
def add_mlp_op_(node):
    """Register Identity, Dense (3 widths x 3 activations) and Dropout candidates."""
    node.add_op(Identity())
    # After each width's three activations, offer a width-matched dropout.
    for units, rate in ((100, 0.3), (500, 0.4), (1000, 0.5)):
        node.add_op(Dense(units, tf.nn.relu))
        node.add_op(Dense(units, tf.nn.tanh))
        node.add_op(Dense(units, tf.nn.sigmoid))
        node.add_op(Dropout(rate))
# Example #11
def create_mlp_node(node):
    """Register Identity, Dense (3 widths x 3 activations) and Dropout candidates."""
    node.add_op(Identity())
    # After each width's three activations, offer a width-matched dropout.
    for units, rate in ((100, 0.05), (500, 0.1), (1000, 0.2)):
        node.add_op(Dense(units, tf.nn.relu))
        node.add_op(Dense(units, tf.nn.tanh))
        node.add_op(Dense(units, tf.nn.sigmoid))
        node.add_op(Dropout(rate))
def create_structure(input_shape=(2, ), output_shape=(1, ), *args, **kwargs):
    """Build a fixed conv -> pool -> dense chain as a classification structure."""
    struct = AutoOutputStructure(input_shape, output_shape, regression=False)

    # The whole architecture is a straight chain of constant nodes.
    ops = [
        Conv1D(filter_size=20, num_filters=128),
        Activation(activation='relu'),
        MaxPooling1D(pool_size=1, padding='same'),
        Conv1D(filter_size=10, num_filters=128),
        Activation(activation='relu'),
        MaxPooling1D(pool_size=10, padding='same'),
        Flatten(),
        Dense(units=200),
        Activation(activation='relu'),
        Dropout(rate=0.1),
        Dense(units=20),
        Activation(activation='relu'),
        Dropout(rate=0.1),
    ]

    prev = struct.input_nodes[0]
    for op in ops:
        node = ConstantNode(op=op, name='N')
        struct.connect(prev, node)
        prev = node

    return struct
# Example #13
def create_search_space(
        input_shape=(2, ), output_shape=(3, ), *args, **kwargs):
    """Three independent single-unit heads (XOR/AND/OR), stacked at the end."""
    ss = KSearchSpace(input_shape, output_shape)
    x = ss.input_nodes[0]

    heads = []
    for label in ("XOR", "AND", "OR"):
        head = ConstantNode(op=Dense(1), name=label)
        ss.connect(x, head)
        heads.append(head)

    out = ConstantNode(name="OUT")
    out.set_op(Concatenate(ss, stacked_nodes=heads))

    return ss
# Example #14
def create_mlp_block(cell, input_node):
    """Build a 3-node chain of Dense(1000, relu) constant nodes as one Block."""
    # First node is wired to the block's fixed input inside the cell graph.
    first = ConstantNode(op=Dense(1000, tf.nn.relu), name='N1')
    cell.graph.add_edge(input_node, first)

    second = ConstantNode(op=Dense(1000, tf.nn.relu), name='N2')
    third = ConstantNode(op=Dense(1000, tf.nn.relu), name='N3')

    block = Block()
    for node in (first, second, third):
        block.add_node(node)
    block.add_edge(first, second)
    block.add_edge(second, third)
    return block
# Example #15
        def add_mlp_ops_to(vnode):
            """Register Identity, Dense and Dropout candidate ops on *vnode*."""
            vnode.add_op(Identity())
            # After each width's three activations, offer a width-matched dropout.
            for units, rate in ((100, 0.05), (500, 0.1), (1000, 0.2)):
                vnode.add_op(Dense(units, tf.nn.relu))
                vnode.add_op(Dense(units, tf.nn.tanh))
                vnode.add_op(Dense(units, tf.nn.sigmoid))
                vnode.add_op(Dropout(rate))
# Example #16
def create_structure(input_shape=[(1, ), (942, ), (5270, ), (2048, )],
                     output_shape=(1, ),
                     num_cells=2,
                     *args,
                     **kwargs):
    """Merge three 3-layer MLP towers with the raw first input, then stack
    residual-style Dense cells."""
    struct = AutoOutputStructure(input_shape, output_shape, regression=True)
    input_nodes = struct.input_nodes

    # First input is forwarded untouched; inputs 1..3 each get a 3-layer tower.
    output_submodels = [input_nodes[0]]
    for i in range(1, 4):
        tail = input_nodes[i]
        for _ in range(3):
            layer = ConstantNode(name='N', op=Dense(1000, tf.nn.relu))
            struct.connect(tail, layer)
            tail = layer
        output_submodels.append(tail)

    merge1 = ConstantNode(name='Merge')
    merge1.set_op(Concatenate(struct, merge1, output_submodels))

    prev = ConstantNode(name='N', op=Dense(1000, tf.nn.relu))
    struct.connect(merge1, prev)

    # Residual-style cells: a Dense layer added (with padding) to its input.
    for i in range(num_cells):
        cnode = ConstantNode(name='N', op=Dense(1000, tf.nn.relu))
        struct.connect(prev, cnode)

        merge = ConstantNode(name='Merge')
        merge.set_op(AddByPadding(struct, merge, [cnode, prev]))
        prev = merge

    return struct
# Example #17
    def create_block():
        """Assemble a 3-node block: input connector -> MLP choice -> dropout choice."""
        # First node: choose which available input feeds the block.
        n_in = Node('N1')
        for inpt in input_nodes:
            n_in.add_op(Connect(cell.graph, inpt, n_in))

        # Second node: identity or one of several Dense layers.
        n_mlp = Node('N2')
        for op in (Identity(),
                   Dense(5, tf.nn.relu),
                   Dense(5, tf.nn.tanh),
                   Dense(10, tf.nn.relu),
                   Dense(10, tf.nn.tanh),
                   Dense(20, tf.nn.relu),
                   Dense(20, tf.nn.tanh)):
            n_mlp.add_op(op)

        # Third node: one of the shared dropout ops.
        n_drop = Node('N3')
        for op in dropout_ops:
            n_drop.add_op(op)

        # Chain the three nodes inside one block.
        block = Block()
        block.add_node(n_in)
        block.add_node(n_mlp)
        block.add_node(n_drop)
        block.add_edge(n_in, n_mlp)
        block.add_edge(n_mlp, n_drop)
        return block
# Example #18
def create_search_space(input_shape=(2,), output_shape=(3,), *args, **kwargs):
    """XOR/AND/OR heads whose inputs may concatenate the other heads' outputs.

    Each head has a variable input node choosing which sibling outputs (if
    any) to concatenate with the raw input before its sigmoid unit. Note the
    add_op order fixes each choice's index in the search space.
    """
    ss = KSearchSpace(input_shape, output_shape)
    x = ss.input_nodes[0]

    # z = ConstantNode(op=Dense(4, activation="relu"), name="Z")
    # ss.connect(x, z)
    # x = z

    out_xor = ConstantNode(op=Dense(1, activation="sigmoid"), name="out_XOR")
    out_and = ConstantNode(op=Dense(1, activation="sigmoid"), name="out_AND")
    out_or = ConstantNode(op=Dense(1, activation="sigmoid"), name="out_OR")

    # (variable-node name, head it feeds, ordered concat choices)
    wiring = (
        ("in_XOR", out_xor,
         [[x], [x, out_and], [x, out_or], [x, out_and, out_or]]),
        ("in_AND", out_and,
         [[x], [x, out_xor], [x, out_or], [x, out_xor, out_or]]),
        ("in_OR", out_or,
         [[x], [x, out_xor], [x, out_and], [x, out_xor, out_and]]),
    )
    for name, head, choices in wiring:
        gate = VariableNode(name=name)
        for nodes in choices:
            gate.add_op(Concatenate(ss, nodes))
        ss.connect(gate, head)

    out = ConstantNode(name="OUT")
    out.set_op(Concatenate(ss, stacked_nodes=[out_xor, out_and, out_or]))

    return ss
# Example #19
        def create_mlp_node(name):
            """Return a VariableNode offering Identity, Dense and Dropout ops."""
            node = VariableNode(name)
            node.add_op(Identity())
            # After each width's three activations, offer a width-matched dropout.
            for units, rate in ((100, 0.05), (500, 0.1), (1000, 0.2)):
                node.add_op(Dense(units, tf.nn.relu))
                node.add_op(Dense(units, tf.nn.tanh))
                node.add_op(Dense(units, tf.nn.sigmoid))
                node.add_op(Dropout(rate))
            return node
# Example #20
def create_dense_cell_type2(input_nodes):
    """MLP type 2.

    Args:
        input_nodes (list(Node)): possible inputs of the current cell.

    Returns:
        Cell: a Cell instance.
    """
    cell = Cell(input_nodes)

    # First node: choose which available input feeds the block.
    n_in = Node('N_0')
    for inpt in input_nodes:
        n_in.add_op(Connect(cell.graph, inpt, n_in))

    # Second node: identity or a relu Dense layer with doubling width.
    n_mlp = Node('N_1')
    n_mlp.add_op(Identity())
    for units in (5, 10, 20, 40, 80, 160, 320):
        n_mlp.add_op(Dense(units, tf.nn.relu))

    # Third node: one of the shared dropout ops.
    n_drop = Node('N_2')
    for op in list(dropout_ops):
        n_drop.add_op(op)

    # Single block chaining the three nodes.
    block = Block()
    block.add_node(n_in)
    block.add_node(n_mlp)
    block.add_node(n_drop)
    block.add_edge(n_in, n_mlp)
    block.add_edge(n_mlp, n_drop)

    cell.add_block(block)

    cell.set_outputs()
    return cell
    def test_create_one_vnode_with_wrong_output_shape(self):
        """create_model must raise when the final layer mismatches the output shape."""
        from deephyper.search.nas.model.space import KSearchSpace
        from deephyper.search.nas.model.space.node import VariableNode
        from deephyper.search.nas.model.space.op.op1d import Dense

        space = KSearchSpace((5, ), (1, ))

        node = VariableNode()
        space.connect(space.input_nodes[0], node)
        # Dense(10) cannot satisfy the declared (1, ) output shape.
        node.add_op(Dense(10))

        space.set_ops([0])

        with pytest.raises(WrongOutputShape):
            space.create_model()
    def test_create_multiple_inputs_with_one_vnode(self):
        """Two inputs concatenated into a single variable Dense node builds fine."""
        from deephyper.search.nas.model.space import KSearchSpace
        from deephyper.search.nas.model.space.node import VariableNode, ConstantNode
        from deephyper.search.nas.model.space.op.op1d import Dense
        from deephyper.search.nas.model.space.op.merge import Concatenate

        space = KSearchSpace([(5, ), (5, )], (1, ))

        # Merge both inputs ahead of the variable node.
        joined = ConstantNode()
        joined.set_op(Concatenate(space, space.input_nodes))

        node = VariableNode()
        space.connect(joined, node)
        node.add_op(Dense(1))

        space.set_ops([0])
        space.create_model()
    def test_create_one_vnode(self):
        """A single variable node with one Dense op: model builds, renders, plots."""
        from deephyper.search.nas.model.space import KSearchSpace
        from deephyper.search.nas.model.space.node import VariableNode
        from deephyper.search.nas.model.space.op.op1d import Dense

        space = KSearchSpace((5, ), (1, ))

        node = VariableNode()
        space.connect(space.input_nodes[0], node)
        node.add_op(Dense(1))

        space.set_ops([0])

        falias = 'test_keras_search_spaceure'
        space.draw_graphviz(f'{falias}.dot')

        model = space.create_model()
        from tensorflow.keras.utils import plot_model
        plot_model(model, to_file=f'{falias}.png', show_shapes=True)
# Example #24
def create_search_space(
        input_shape=(2, ), output_shape=(5, ), *args, **kwargs):
    """Build a search space of five cooperating Dense heads (out_2..out_6).

    Each head k has a hidden layer ``hid_k`` feeding a 1-unit output
    ``out_k``; a VariableNode ``in_k`` chooses which other heads' outputs
    are concatenated with the input ``x`` before ``hid_k``. A final OUT
    node stacks all five head outputs.

    NOTE(review): the order of ``add_op`` calls fixes each choice's index
    in the search space — the Concatenate lists below must not be reordered.
    """

    ss = KSearchSpace(input_shape, output_shape)
    x = ss.input_nodes[0]

    nunits = 10

    # One hidden layer + scalar output per head.
    hid_2 = ConstantNode(op=Dense(nunits, "relu"), name="hid_2")
    out_2 = ConstantNode(op=Dense(1), name="out_2")
    ss.connect(hid_2, out_2)

    hid_3 = ConstantNode(op=Dense(nunits, "relu"), name="hid_3")
    out_3 = ConstantNode(op=Dense(1), name="out_3")
    ss.connect(hid_3, out_3)

    hid_4 = ConstantNode(op=Dense(nunits, "relu"), name="hid_4")
    out_4 = ConstantNode(op=Dense(1), name="out_4")
    ss.connect(hid_4, out_4)

    hid_5 = ConstantNode(op=Dense(nunits, "relu"), name="hid_5")
    out_5 = ConstantNode(op=Dense(1), name="out_5")
    ss.connect(hid_5, out_5)

    hid_6 = ConstantNode(op=Dense(nunits, "relu"), name="hid_6")
    out_6 = ConstantNode(op=Dense(1), name="out_6")
    ss.connect(hid_6, out_6)

    # L1 DEPENDENT ON DATA OVERALL
    in_2 = VariableNode(name="in_2")
    in_2.add_op(Concatenate(ss, [x]))
    in_2.add_op(Concatenate(ss, [x, out_3]))
    in_2.add_op(Concatenate(ss, [x, out_4]))
    in_2.add_op(Concatenate(ss, [x, out_5]))
    in_2.add_op(Concatenate(ss, [x, out_6]))

    ss.connect(in_2, hid_2)

    # L2 DEPENDANT ON DATA, L1 AND L3
    in_3 = VariableNode(name="in_3")
    in_3.add_op(Concatenate(ss, [x, out_2]))
    in_3.add_op(Concatenate(ss, [x, out_4]))

    ss.connect(in_3, hid_3)

    # L3 DEPENDANT ON DATA, L1 L2 AND WIDTH
    # All subsets of {out_2, out_3, out_5, out_6} combined with x.
    in_4 = VariableNode(name="in_4")
    in_4.add_op(Concatenate(ss, [x]))
    in_4.add_op(Concatenate(ss, [x, out_2]))
    in_4.add_op(Concatenate(ss, [x, out_3]))
    in_4.add_op(Concatenate(ss, [x, out_5]))
    in_4.add_op(Concatenate(ss, [x, out_6]))
    in_4.add_op(Concatenate(ss, [x, out_2, out_3]))
    in_4.add_op(Concatenate(ss, [x, out_2, out_5]))
    in_4.add_op(Concatenate(ss, [x, out_2, out_6]))
    in_4.add_op(Concatenate(ss, [x, out_3, out_5]))
    in_4.add_op(Concatenate(ss, [x, out_3, out_6]))
    in_4.add_op(Concatenate(ss, [x, out_5, out_6]))
    in_4.add_op(Concatenate(ss, [x, out_3, out_5, out_6]))
    in_4.add_op(Concatenate(ss, [x, out_2, out_5, out_6]))
    in_4.add_op(Concatenate(ss, [x, out_2, out_3, out_6]))
    in_4.add_op(Concatenate(ss, [x, out_2, out_3, out_5]))
    in_4.add_op(Concatenate(ss, [x, out_2, out_3, out_5, out_6]))

    ss.connect(in_4, hid_4)

    # HEIGHT DEPENDANT ON ALL MEASURES COMBINED AND DATA
    # All subsets of {out_2, out_3, out_4, out_6} combined with x.
    in_5 = VariableNode(name="in_5")
    in_5.add_op(Concatenate(ss, [x]))
    in_5.add_op(Concatenate(ss, [x, out_2]))
    in_5.add_op(Concatenate(ss, [x, out_3]))
    in_5.add_op(Concatenate(ss, [x, out_4]))
    in_5.add_op(Concatenate(ss, [x, out_6]))
    in_5.add_op(Concatenate(ss, [x, out_2, out_3]))
    in_5.add_op(Concatenate(ss, [x, out_2, out_4]))
    in_5.add_op(Concatenate(ss, [x, out_2, out_6]))
    in_5.add_op(Concatenate(ss, [x, out_3, out_4]))
    in_5.add_op(Concatenate(ss, [x, out_3, out_6]))
    in_5.add_op(Concatenate(ss, [x, out_4, out_6]))
    in_5.add_op(Concatenate(ss, [x, out_3, out_4, out_6]))
    in_5.add_op(Concatenate(ss, [x, out_2, out_4, out_6]))
    in_5.add_op(Concatenate(ss, [x, out_2, out_3, out_6]))
    in_5.add_op(Concatenate(ss, [x, out_2, out_3, out_4]))
    in_5.add_op(Concatenate(ss, [x, out_2, out_3, out_4, out_6]))
    ss.connect(in_5, hid_5)

    # WIDTH DEPENDANT ON DATA AND CROSS SECTION
    in_6 = VariableNode(name="in_6")
    in_6.add_op(Concatenate(ss, [x]))
    in_6.add_op(Concatenate(ss, [x, out_2]))
    in_6.add_op(Concatenate(ss, [x, out_3]))
    in_6.add_op(Concatenate(ss, [x, out_4]))
    in_6.add_op(Concatenate(ss, [x, out_5]))
    ss.connect(in_6, hid_6)

    # Stack all five scalar heads into the final output.
    out = ConstantNode(name="OUT")
    out.set_op(
        Concatenate(ss, stacked_nodes=[out_2, out_3, out_4, out_5, out_6]))

    return ss
# Example #25
def create_mlp_node(node):
    """Register a single Dense(1000, relu) candidate op on *node*."""
    node.add_op(Dense(1000, tf.nn.relu))
# Example #26
def create_search_space(input_shape=None,
                        output_shape=None,
                        num_mpnn_cells=3,
                        num_dense_layers=2,
                        **kwargs):
    """Create a search space containing multiple Keras architectures

    Args:
        input_shape (list): the input shapes, e.g. [(3, 4), (5, 2)].
        output_shape (tuple): the output shape, e.g. (12, ).
        num_mpnn_cells (int): the number of MPNN cells.
        num_dense_layers (int): the number of Dense layers.

    Returns:
        A search space containing multiple Keras architectures
    """
    # Dataset-specific shape tables override the passed-in shapes.
    # NOTE(review): an unrecognized `data` value keeps the caller-supplied
    # shapes (possibly None, which would fail in KSearchSpace) — confirm
    # that is intended.
    data = kwargs['data']
    if data == 'qm7':
        input_shape = [(8 + 1, 75), (8 + 1 + 10 + 1, 2), (8 + 1 + 10 + 1, 14),
                       (8 + 1, ), (8 + 1 + 10 + 1, )]
        output_shape = (1, )
    elif data == 'qm8':
        input_shape = [(9 + 1, 75), (9 + 1 + 14 + 1, 2), (9 + 1 + 14 + 1, 14),
                       (9 + 1, ), (9 + 1 + 14 + 1, )]
        output_shape = (16, )
    elif data == 'qm9':
        input_shape = [(9 + 1, 75), (9 + 1 + 16 + 1, 2), (9 + 1 + 16 + 1, 14),
                       (9 + 1, ), (9 + 1 + 16 + 1, )]
        output_shape = (12, )
    elif data == 'freesolv':
        input_shape = [(24 + 1, 75), (24 + 1 + 25 + 1, 2),
                       (24 + 1 + 25 + 1, 14), (24 + 1, ), (24 + 1 + 25 + 1, )]
        output_shape = (1, )
    elif data == 'esol':
        input_shape = [(55 + 1, 75), (55 + 1 + 68 + 1, 2),
                       (55 + 1 + 68 + 1, 14), (55 + 1, ), (55 + 1 + 68 + 1, )]
        output_shape = (1, )
    elif data == 'lipo':
        input_shape = [(115 + 1, 75), (115 + 1 + 236 + 1, 2),
                       (115 + 1 + 236 + 1, 14), (115 + 1, ),
                       (115 + 1 + 236 + 1, )]
        output_shape = (1, )
    arch = KSearchSpace(input_shape, output_shape, regression=True)
    # First input flows through the cells; the other four feed every cell.
    source = prev_input = arch.input_nodes[0]
    prev_input1 = arch.input_nodes[1]
    prev_input2 = arch.input_nodes[2]
    prev_input3 = arch.input_nodes[3]
    prev_input4 = arch.input_nodes[4]

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([source], maxlen=3)

    count_gcn_layers = 0
    count_dense_layers = 0
    for _ in range(num_mpnn_cells):
        graph_attn_cell = VariableNode()
        mpnn_cell(graph_attn_cell)  #
        arch.connect(prev_input, graph_attn_cell)
        arch.connect(prev_input1, graph_attn_cell)
        arch.connect(prev_input2, graph_attn_cell)
        arch.connect(prev_input3, graph_attn_cell)
        arch.connect(prev_input4, graph_attn_cell)

        cell_output = graph_attn_cell
        cmerge = ConstantNode()
        cmerge.set_op(AddByProjecting(arch, [cell_output], activation="relu"))

        # Optional skip connections from up to 3 earlier anchor nodes;
        # Tensor([]) is the "no connection" choice.
        for anchor in anchor_points:
            skipco = VariableNode()
            skipco.add_op(Tensor([]))
            skipco.add_op(Connect(arch, anchor))
            arch.connect(skipco, cmerge)

        prev_input = cmerge
        anchor_points.append(prev_input)
        count_gcn_layers += 1

    # Pooling over the graph dimension, then flatten for the dense head.
    global_pooling_node = VariableNode()
    gather_cell(global_pooling_node)
    arch.connect(prev_input, global_pooling_node)
    prev_input = global_pooling_node

    flatten_node = ConstantNode()
    flatten_node.set_op(Flatten())
    arch.connect(prev_input, flatten_node)
    prev_input = flatten_node

    for _ in range(num_dense_layers):
        dense_node = ConstantNode()
        dense_node.set_op(Dense(32, activation='relu'))
        arch.connect(prev_input, dense_node)
        prev_input = dense_node
        count_dense_layers += 1

    # Linear regression head sized by the output shape.
    output_node = ConstantNode()
    output_node.set_op(Dense(output_shape[0], activation='linear'))
    arch.connect(prev_input, output_node)

    return arch
# Example #27
def create_search_space(input_shape=None,
                        output_shape=None,
                        num_mpnn_layers=3,
                        num_dense_layers=2,
                        **kwargs):
    """Create a Keras search space of MPNN cells followed by dense layers.

    Args:
        input_shape: list of tuples, the input tensor shapes.
        output_shape: tuple, the output tensor shape.
        num_mpnn_layers: int, number of graph message-passing layers.
        num_dense_layers: int, number of dense layers.
        **kwargs:
            data: str, the dataset name.

    Returns:
        arch: keras architecture
    """
    data = kwargs['data']
    if data == 'qm7':
        # Dataset-specific shapes override the caller-supplied ones.
        input_shape = [(9+1, 75), (9+1+16+1, 2), (9+1+16+1, 14), (9+1, ), (9+1+16+1, )]
        output_shape = (1, )

    arch = KSearchSpace(input_shape, output_shape, regression=True)
    # First input flows through the cells; the other four feed every cell.
    source = arch.input_nodes[0]
    side_inputs = [arch.input_nodes[i] for i in (1, 2, 3, 4)]
    prev_input = source

    # Skip connections may reach back at most 3 nodes.
    anchor_points = collections.deque([source], maxlen=3)
    count_mpnn_layers = 0
    count_dense_layers = 0
    for _ in range(num_mpnn_layers):
        mpnn_node = VariableNode()
        add_mpnn_to_(mpnn_node)
        arch.connect(prev_input, mpnn_node)
        for extra in side_inputs:
            arch.connect(extra, mpnn_node)

        merge_node = ConstantNode()
        merge_node.set_op(AddByProjecting(arch, [mpnn_node], activation="relu"))

        # Optional skip connections; Tensor([]) is the "no connection" choice.
        for anchor in anchor_points:
            skip = VariableNode()
            skip.add_op(Tensor([]))
            skip.add_op(Connect(arch, anchor))
            arch.connect(skip, merge_node)

        prev_input = merge_node
        anchor_points.append(prev_input)
        count_mpnn_layers += 1

    # Global pooling reduces the per-node features to a per-graph vector.
    pooling_node = VariableNode()
    add_global_pooling_to_(pooling_node)
    arch.connect(prev_input, pooling_node)
    prev_input = pooling_node

    flat_node = ConstantNode()
    flat_node.set_op(Flatten())
    arch.connect(prev_input, flat_node)
    prev_input = flat_node

    for _ in range(num_dense_layers):
        dense_node = ConstantNode()
        dense_node.set_op(Dense(32, activation='relu'))
        arch.connect(prev_input, dense_node)
        prev_input = dense_node
        count_dense_layers += 1

    # Linear regression head sized by the output shape.
    head = ConstantNode()
    head.set_op(Dense(output_shape[0], activation='linear'))
    arch.connect(prev_input, head)

    return arch