Example #1
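This snippet wires up a simple sequence classifier with PaddlePaddle's legacy configuration helpers: word ids and labels come in through data layers, the words are embedded and run through a recurrent_group whose state is bootstrapped by a small boot_fc layer, and the last state feeds a softmax layer trained with a classification cost. The snippet is a fragment and relies on module-level context that is not shown here. A minimal sketch of that assumed context follows (the conf_helps alias matches common PaddlePaddle test code; the dimension values are purely illustrative):

    import paddle.trainer_config_helpers as conf_helps

    # Illustrative sizes; the original test module defines its own values.
    dict_dim = 10     # vocabulary size of the "word" input
    word_dim = 8      # embedding width
    hidden_dim = 8    # recurrent state width
    label_dim = 3     # number of target classes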
def test():
    # Input layers: integer word ids and the target class label.
    data = conf_helps.data_layer(name="word", size=dict_dim)
    label = conf_helps.data_layer(name="label", size=label_dim)
    emb = conf_helps.embedding_layer(input=data, size=word_dim)

    # Initial recurrent state, projected through a small fc layer.
    boot_layer = conf_helps.data_layer(name="boot", size=10)
    boot_layer = conf_helps.fc_layer(
        name='boot_fc', input=boot_layer, size=10)

    def step(y, wid):
        # wid is the raw word id; embed it again inside the step.
        z = conf_helps.embedding_layer(input=wid, size=word_dim)
        # memory() reads the previous time step's output of the layer
        # named "rnn_state"; boot_layer supplies the value at t = 0.
        mem = conf_helps.memory(
            name="rnn_state",
            size=hidden_dim,
            boot_layer=boot_layer)
        # Naming this fc layer "rnn_state" closes the recurrent loop
        # with the memory declared above.
        out = conf_helps.fc_layer(
            input=[y, z, mem],
            size=hidden_dim,
            act=conf_helps.TanhActivation(),
            bias_attr=True,
            name="rnn_state")
        return out

    # Unroll step() over the sequence; y comes from emb, wid from data.
    out = conf_helps.recurrent_group(
        name="rnn", step=step, input=[emb, data])

    # Classify the sequence from the last time step's state.
    rep = conf_helps.last_seq(input=out)
    prob = conf_helps.fc_layer(
        size=label_dim,
        input=rep,
        act=conf_helps.SoftmaxActivation(),
        bias_attr=True)

    conf_helps.outputs(
        conf_helps.classification_cost(
            input=prob, label=label))
Example #2
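The same helpers can also run a recurrent group backwards: with reverse=True, recurrent_group applies the step function from the last element of the embedded sequence to the first. The step callable is not defined in this fragment; it is assumed to be a function of the form shown in Example #3 below.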
def test():
    data = conf_helps.data_layer(name="word", size=dict_dim)
    embd = conf_helps.embedding_layer(input=data, size=word_dim)
    # reverse=True iterates over the sequence from the end to the start.
    conf_helps.recurrent_group(name="rnn",
                               step=step,
                               input=embd,
                               reverse=True)
Example #3
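This is the kind of step function the recurrent groups above expect. At each time step it embeds the current word id, reads the previous value of the layer named "rnn_state" through memory (with boot_layer supplying the initial state), and computes the new state with a tanh fc layer. The names word_dim, hidden_dim and boot_layer are taken from the enclosing scope of the original test.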
def step(y, wid):
    # Embed the current word id.
    z = conf_helps.embedding_layer(input=wid, size=word_dim)
    # Previous output of the layer named "rnn_state"; boot_layer gives
    # the state for the first time step.
    mem = conf_helps.memory(name="rnn_state",
                            size=hidden_dim,
                            boot_layer=boot_layer)
    # Naming this fc layer "rnn_state" closes the recurrent loop with
    # the memory declared above.
    out = conf_helps.fc_layer(input=[y, z, mem],
                              size=hidden_dim,
                              act=conf_helps.TanhActivation(),
                              bias_attr=True,
                              name="rnn_state")
    return out