Example #1
    def __init__(self,
                 model: dy.ParameterCollection,
                 in_dim: int,
                 hid_dim: int,
                 p: float = 0.1):

        pc = model.add_subcollection()
        self.W1 = Linear(pc, in_dim, hid_dim)
        self.W2 = Linear(pc, hid_dim, in_dim)
        self.p = p
        self.pc = pc
        self.spec = (in_dim, hid_dim, p)
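
The `spec` tuple stored at the end follows DyNet's model-saving convention: an object saved with `dy.save` is expected to expose its `spec` together with a `param_collection()` accessor and a static `from_spec()` so that `dy.load` can rebuild it. A minimal sketch of those companion methods, assuming the class is named `FeedForward`, that `Linear` is callable, and that the forward pass is `W2(f(W1(x)))` with dropout (none of this is shown in the source):

    def __call__(self, x: dy.Expression, train: bool = False) -> dy.Expression:
        h = dy.rectify(self.W1(x))      # assumed nonlinearity between the layers
        if train:
            h = dy.dropout(h, self.p)   # p is the dropout rate from __init__
        return self.W2(h)

    def param_collection(self):
        return self.pc                  # hook used by dy.load

    @staticmethod
    def from_spec(spec, model):
        in_dim, hid_dim, p = spec       # rebuild from the saved spec tuple
        return FeedForward(model, in_dim, hid_dim, p)
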
Example #2
    def __init__(self,
                 model: dy.ParameterCollection,
                 in_dim: int,
                 out_dim: int,
                 init: dy.PyInitializer = None,
                 bias: bool = True):

        pc = model.add_subcollection()
        if init is None:
            init = dy.UniformInitializer(math.sqrt(in_dim))
        self.W = pc.add_parameters((out_dim, in_dim), init=init)
        if bias:
            self.b = pc.add_parameters((out_dim, ), init=init)
        self.pc = pc
        self.bias = bias
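
Only the constructor is shown; the forward pass of such a layer would presumably be the affine map W·x (+ b). A hedged sketch of the missing `__call__`, consistent with the shapes above but an assumption nonetheless:

    def __call__(self, x: dy.Expression) -> dy.Expression:
        W = dy.parameter(self.W)        # (out_dim, in_dim)
        if self.bias:
            return W * x + dy.parameter(self.b)
        return W * x

Note that the default `dy.UniformInitializer(math.sqrt(in_dim))` samples from [-√in_dim, √in_dim], which is unusually wide; the more common scaling is 1/√in_dim.
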
Example #3
    def __init__(self, model: dy.ParameterCollection, cfg: IniConfigurator,
                 vocabulary: Vocabulary):

        pc = model.add_subcollection()
        # MLP layer
        orth_init = OrthogonalInitializer  # the initializer class itself, not an instance
        self.head_arc_MLP = MLP(pc, cfg.ARC_MLP_SIZE, leaky_relu, cfg.MLP_DROP,
                                cfg.MLP_BIAS, orth_init)
        self.head_rel_MLP = MLP(pc, cfg.REL_MLP_SIZE, leaky_relu, cfg.MLP_DROP,
                                cfg.MLP_BIAS, orth_init)
        self.dept_arc_MLP = MLP(pc, cfg.ARC_MLP_SIZE, leaky_relu, cfg.MLP_DROP,
                                cfg.MLP_BIAS, orth_init)
        self.dept_rel_MLP = MLP(pc, cfg.REL_MLP_SIZE, leaky_relu, cfg.MLP_DROP,
                                cfg.MLP_BIAS, orth_init)

        # Biaffine Attention Layer (Arc)
        arc_size = cfg.ARC_MLP_SIZE[-1]
        zero_init = dy.ConstInitializer(0)
        self.arc_attn_mat = [
            BiaffineMatAttention(pc, arc_size, arc_size, 1, True, False,
                                 zero_init)
            for _ in range(cfg.GRAPH_LAYERS + 1)
        ]

        # Biaffine Attention Layer (Rel)
        rel_num = vocabulary.get_vocab_size('rel')
        rel_size = cfg.REL_MLP_SIZE[-1]
        self.rel_mask = np.array([1] + [0] * (rel_num - 1))  # mask the root relation
        self.rel_attn = BiaffineMatAttention(pc, rel_size, rel_size, rel_num,
                                             True, True, zero_init)

        # Graph Network Layer
        self.head_gnn = GraphNNUnit(pc, arc_size, arc_size, leaky_relu,
                                    orth_init)
        self.dept_gnn = GraphNNUnit(pc, arc_size, arc_size, leaky_relu,
                                    orth_init)
        self.head_rel_gnn = GraphNNUnit(pc, rel_size, rel_size, leaky_relu,
                                        orth_init)
        self.dept_rel_gnn = GraphNNUnit(pc, rel_size, rel_size, leaky_relu,
                                        orth_init)

        # Graph Layer WarmUp
        self.warm_list = [
            -i * cfg.WARM for i in range(cfg.GRAPH_LAYERS, -1, -1)
        ]

        # Save Variable
        self.arc_size, self.rel_size, self.rel_num = arc_size, rel_size, rel_num
        self.pc, self.cfg = pc, cfg
        self.spec = (cfg, vocabulary)
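
Two small details of this constructor are worth unpacking. `rel_mask` is a one-hot vector over relation labels with a 1 at index 0, presumably used to mask out the root relation when scoring; `warm_list` holds per-layer warm-up offsets `[-GRAPH_LAYERS * WARM, ..., -WARM, 0]`. A self-contained illustration of the mask construction (the label count here is made up):

import numpy as np

rel_num = 5                                     # hypothetical label count
rel_mask = np.array([1] + [0] * (rel_num - 1))  # 1 at the root relation, 0 elsewhere
print(rel_mask)                                 # -> [1 0 0 0 0]
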
Example #4
    def __init__(self,
                 model: dy.ParameterCollection,
                 in_dim: int,
                 out_dim: int,
                 bias: bool = True,
                 init: dy.PyInitializer = dy.GlorotInitializer()):

        pc = model.add_subcollection()
        init = init_wrap(init, (out_dim, in_dim))
        self.W = pc.add_parameters((out_dim, in_dim), init=init)
        if bias:
            self.b = pc.add_parameters((out_dim, ), init=0)
        self.pc = pc
        self.bias = bias
        self.spec = (in_dim, out_dim, bias, init)
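
`init_wrap` is a project-level helper that is not shown in these snippets. Judging from its call sites (it receives an initializer plus the parameter shape, and in Example #3 an initializer class such as `OrthogonalInitializer` is passed around rather than an instance), a plausible but hypothetical reconstruction of its role is:

import dynet as dy

def init_wrap(init, shape):
    # Hypothetical reconstruction: ready-made DyNet initializers pass through
    # unchanged, while shape-aware initializer classes are instantiated for
    # the given parameter shape.
    if isinstance(init, dy.PyInitializer):
        return init
    return init(shape)
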
Example #5
    def __init__(
            self,
            model: dy.ParameterCollection,
            h_dim: int,
            d_dim: int,
            f=dy.tanh,
            init: dy.PyInitializer = dy.GlorotInitializer()):

        pc = model.add_subcollection()
        init_W = init_wrap(init, (h_dim, d_dim))
        self.W = pc.add_parameters((h_dim, d_dim), init=init_W)
        init_B = init_wrap(init, (h_dim, h_dim))
        self.B = pc.add_parameters((h_dim, h_dim), init=init_B)

        self.pc, self.f = pc, f
        self.spec = (h_dim, d_dim, f, init)
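
This looks like the `GraphNNUnit` constructed four times in Example #3: `W` maps a `d_dim` input into the `h_dim` state space and `B` transforms the state itself. The forward pass is not shown; a hedged guess consistent with those shapes is an affine mix followed by the nonlinearity `f`:

    def __call__(self, d: dy.Expression, h: dy.Expression) -> dy.Expression:
        W = dy.parameter(self.W)        # (h_dim, d_dim)
        B = dy.parameter(self.B)        # (h_dim, h_dim)
        return self.f(W * d + B * h)    # assumption; the real update may differ
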
Example #6
    def __init__(self,
                 model: dy.ParameterCollection,
                 sizes: List[int],
                 f=dy.tanh,
                 p: float = 0.0,
                 bias: bool = True,
                 init: dy.PyInitializer = dy.GlorotInitializer()):

        pc = model.add_subcollection()
        self.W = [
            pc.add_parameters((x, y), init=init_wrap(init, (x, y)))
            for x, y in zip(sizes[1:], sizes[:-1])
        ]
        if bias:
            self.b = [pc.add_parameters((y, ), init=0) for y in sizes[1:]]

        self.pc, self.f, self.p, self.bias = pc, f, p, bias
        self.spec = (sizes, f, p, bias, init)
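
The weight list pairs consecutive entries of `sizes`, so `sizes=[128, 64, 32]` yields weights of shape `(64, 128)` and `(32, 64)`. A sketch of the forward pass such an MLP typically implements (an assumption; only the constructor is shown):

    def __call__(self, x: dy.Expression, train: bool = False) -> dy.Expression:
        n = len(self.W)
        for i in range(n):
            x = dy.parameter(self.W[i]) * x
            if self.bias:
                x = x + dy.parameter(self.b[i])
            if i < n - 1:                   # common choice: nonlinearity and dropout
                x = self.f(x)               # on hidden layers only; the source may
                if train and self.p > 0:    # also apply f to the last layer
                    x = dy.dropout(x, self.p)
        return x
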