Example #1
        def parse_new_rnn():
            reset_parser()
            data = layer.data(
                name="word", type=data_type.dense_vector(dict_dim))
            label = layer.data(
                name="label", type=data_type.dense_vector(label_dim))
            emb = layer.embedding(input=data, size=word_dim)
            # The "rnn_state" memory below is bootstrapped from this
            # learned fc projection of the "boot" input.
            boot_layer = layer.data(
                name="boot", type=data_type.dense_vector(10))
            boot_layer = layer.fc(name='boot_fc', input=boot_layer, size=10)

            # One recurrence step: embed the current word id, read the
            # previous "rnn_state", and emit the new state from an fc layer.
            def step(y, wid):
                z = layer.embedding(input=wid, size=word_dim)
                mem = layer.memory(
                    name="rnn_state", size=hidden_dim, boot_layer=boot_layer)
                out = layer.fc(input=[y, z, mem],
                               size=hidden_dim,
                               act=activation.Tanh(),
                               bias_attr=True,
                               name="rnn_state")
                return out

            # Unroll step() over the two input sequences in lockstep.
            out = layer.recurrent_group(
                name="rnn", step=step, input=[emb, data])

            # The output at the last time step is the sequence representation.
            rep = layer.last_seq(input=out)
            prob = layer.fc(size=label_dim,
                            input=rep,
                            act=activation.Softmax(),
                            bias_attr=True)

            cost = layer.classification_cost(input=prob, label=label)

            # Traverse the graph from cost and serialize the network config.
            return str(layer.parse_network(cost))
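
The snippet above assumes the surrounding PaddlePaddle v2 test module. The sketch below fills in that assumed context; the module aliases follow the v2 convention, while the reset_parser import path and the dimension values are assumptions added here for illustration.

    # Assumed context for parse_new_rnn() (a sketch, not part of the example).
    import paddle.v2.activation as activation
    import paddle.v2.data_type as data_type
    import paddle.v2.layer as layer
    # reset_parser() clears global parser state between network definitions;
    # this import path is an assumption.
    from paddle.trainer.config_parser import reset_parser

    dict_dim = 10   # vocabulary size (illustrative)
    word_dim = 8    # embedding width (illustrative)
    hidden_dim = 8  # RNN state width (illustrative)
    label_dim = 3   # number of classes (illustrative)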
Example #2
        def parse_new_rnn():
            data = layer.data(name="word",
                              type=data_type.dense_vector(dict_dim))
            label = layer.data(name="label",
                               type=data_type.dense_vector(label_dim))
            emb = layer.embedding(input=data, size=word_dim)
            boot_layer = layer.data(name="boot",
                                    type=data_type.dense_vector(10))
            boot_layer = layer.fc(name='boot_fc', input=boot_layer, size=10)

            def step(y, wid):
                z = layer.embedding(input=wid, size=word_dim)
                mem = layer.memory(name="rnn_state",
                                   size=hidden_dim,
                                   boot_layer=boot_layer)
                out = layer.fc(input=[y, z, mem],
                               size=hidden_dim,
                               act=activation.Tanh(),
                               bias_attr=True,
                               name="rnn_state")
                return out

            out = layer.recurrent_group(name="rnn",
                                        step=step,
                                        input=[emb, data])

            rep = layer.last_seq(input=out)
            prob = layer.fc(size=label_dim,
                            input=rep,
                            act=activation.Softmax(),
                            bias_attr=True)

            cost = layer.classification_cost(input=prob, label=label)

            return str(layer.parse_network(cost))
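
Example #2 defines the same network as Example #1 with different line wrapping; the only functional difference is that it omits the explicit reset_parser() call, so it relies on the parser state being fresh when it runs. It assumes the same context sketched above.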
Example #3
 def test_aggregate_layer(self):
     # Average-pool the input; agg_level controls the aggregation granularity.
     pool = layer.pooling(input=pixel,
                          pooling_type=pooling.Avg(),
                          agg_level=layer.AggregateLevel.EACH_SEQUENCE)
     # Take the last and first element of each sequence, then join them:
     # concat stacks the two feature vectors side by side, while seq_concat
     # appends sequence b after sequence a.
     last_seq = layer.last_seq(input=pixel)
     first_seq = layer.first_seq(input=pixel)
     concat = layer.concat(input=[last_seq, first_seq])
     seq_concat = layer.seq_concat(a=last_seq, b=first_seq)
     print(layer.parse_network(pool, last_seq, first_seq, concat, seq_concat))
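
Both aggregate-layer snippets assume a pixel data layer defined elsewhere in the test module. A minimal sketch of that assumed context (the 128-wide dense vector mirrors the v2 layer tests but is an assumption here):

    # Assumed context for test_aggregate_layer (a sketch).
    import paddle.v2.data_type as data_type
    import paddle.v2.layer as layer
    import paddle.v2.pooling as pooling

    pixel = layer.data(name='pixel', type=data_type.dense_vector(128))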
Example #4
 def test_aggregate_layer(self):
     # Same pooling as Example #3; AggregateLevel.TO_SEQUENCE is the later
     # name for AggregateLevel.EACH_SEQUENCE.
     pool = layer.pooling(
         input=pixel,
         pooling_type=pooling.Avg(),
         agg_level=layer.AggregateLevel.TO_SEQUENCE)
     last_seq = layer.last_seq(input=pixel)
     first_seq = layer.first_seq(input=pixel)
     concat = layer.concat(input=[last_seq, first_seq])
     seq_concat = layer.seq_concat(a=last_seq, b=first_seq)
     print(layer.parse_network(
         [pool, last_seq, first_seq, concat, seq_concat]))
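
Examples #3 and #4 are the same test written against two vintages of the v2 API: besides the AggregateLevel rename noted above, Example #3 passes the output layers to layer.parse_network as separate arguments while Example #4 passes them as a single list; both forms print the parsed network configuration.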