Example #1
0
        def parse_old_rnn():
            """Build a minimal old-style RNN config and return its parsed
            text form (via ``parse_network``).

            The recurrent loop is formed by a memory layer and an fc layer
            that share the name "rnn_state": the memory reads back the fc
            layer's previous-step output.
            """

            def step(y):
                # Previous-step state of the fc layer named "rnn_state".
                prev_state = conf_helps.memory(name="rnn_state", size=hidden_dim)
                return conf_helps.fc_layer(
                    input=[y, prev_state],
                    size=hidden_dim,
                    act=activation.Tanh(),
                    bias_attr=True,
                    name="rnn_state")

            def test():
                word = conf_helps.data_layer(name="word", size=dict_dim)
                word_emb = conf_helps.embedding_layer(input=word, size=word_dim)
                conf_helps.recurrent_group(name="rnn", step=step, input=word_emb)

            return str(parse_network(test))
Example #2
0
        def parse_old_rnn():
            # Old-style RNN definition; returns the parsed network as text.
            # NOTE(review): unlike the later examples, this one does not call
            # reset_parser() first — confirm whether the surrounding test
            # harness resets global parser state between cases.
            def step(y):
                state = conf_helps.memory(name="rnn_state", size=hidden_dim)
                out = conf_helps.fc_layer(input=[y, state],
                                          size=hidden_dim,
                                          act=activation.Tanh(),
                                          bias_attr=True,
                                          name="rnn_state")
                return out

            def test():
                tokens = conf_helps.data_layer(name="word", size=dict_dim)
                vectors = conf_helps.embedding_layer(input=tokens, size=word_dim)
                conf_helps.recurrent_group(name="rnn", step=step, input=vectors)

            return str(parse_network(test))
Example #3
0
        def parse_old_rnn():
            """Configure an RNN classifier with the old-style helpers and
            return the parsed network as a string.

            Layer-creation order is kept exactly as in the original, since
            auto-generated layer names in the emitted config depend on it.
            """
            # Clear global parser state before defining the graph.
            reset_parser()

            def test():
                word = conf_helps.data_layer(name="word", size=dict_dim)
                label = conf_helps.data_layer(name="label", size=label_dim)
                word_emb = conf_helps.embedding_layer(input=word, size=word_dim)
                # Bootstrap input for the recurrent memory's initial state.
                boot = conf_helps.data_layer(name="boot", size=10)
                boot = conf_helps.fc_layer(
                    name='boot_fc', input=boot, size=10)

                def step(y, wid):
                    wid_emb = conf_helps.embedding_layer(input=wid, size=word_dim)
                    # State fed back from the fc layer named "rnn_state",
                    # initialized from the boot fc layer at step 0.
                    prev = conf_helps.memory(
                        name="rnn_state",
                        size=hidden_dim,
                        boot_layer=boot)
                    return conf_helps.fc_layer(
                        input=[y, wid_emb, prev],
                        size=hidden_dim,
                        act=conf_helps.TanhActivation(),
                        bias_attr=True,
                        name="rnn_state")

                rnn_out = conf_helps.recurrent_group(
                    name="rnn", step=step, input=[word_emb, word])

                # Classify from the last time step's state.
                last = conf_helps.last_seq(input=rnn_out)
                prob = conf_helps.fc_layer(
                    size=label_dim,
                    input=last,
                    act=conf_helps.SoftmaxActivation(),
                    bias_attr=True)

                conf_helps.outputs(
                    conf_helps.classification_cost(input=prob, label=label))

            return str(parse_network(test))
Example #4
0
        def parse_old_rnn():
            # Build the old-style RNN classifier config and return its
            # parsed text representation. Call order of the layer helpers
            # is preserved from the original definition.
            reset_parser()

            def test():
                tokens = conf_helps.data_layer(name="word", size=dict_dim)
                target = conf_helps.data_layer(name="label", size=label_dim)
                token_emb = conf_helps.embedding_layer(input=tokens,
                                                       size=word_dim)
                # Initial-state source for the recurrent memory.
                init = conf_helps.data_layer(name="boot", size=10)
                init = conf_helps.fc_layer(name='boot_fc',
                                           input=init,
                                           size=10)

                def step(y, wid):
                    inner_emb = conf_helps.embedding_layer(input=wid,
                                                           size=word_dim)
                    state = conf_helps.memory(name="rnn_state",
                                              size=hidden_dim,
                                              boot_layer=init)
                    hidden = conf_helps.fc_layer(input=[y, inner_emb, state],
                                                 size=hidden_dim,
                                                 act=conf_helps.TanhActivation(),
                                                 bias_attr=True,
                                                 name="rnn_state")
                    return hidden

                rnn = conf_helps.recurrent_group(name="rnn",
                                                 step=step,
                                                 input=[token_emb, tokens])

                final = conf_helps.last_seq(input=rnn)
                scores = conf_helps.fc_layer(size=label_dim,
                                             input=final,
                                             act=conf_helps.SoftmaxActivation(),
                                             bias_attr=True)

                conf_helps.outputs(
                    conf_helps.classification_cost(input=scores, label=target))

            return str(parse_network(test))