def parse_new_rnn():
    reset_parser()  # clear the config parser's global state before building
    data = layer.data(
        name="word", type=data_type.dense_vector(dict_dim))
    label = layer.data(
        name="label", type=data_type.dense_vector(label_dim))
    emb = layer.embedding(input=data, size=word_dim)
    # boot_layer supplies the initial value of the recurrent memory at step 0
    boot_layer = layer.data(
        name="boot", type=data_type.dense_vector(10))
    boot_layer = layer.fc(name='boot_fc', input=boot_layer, size=10)

    def step(y, wid):
        # One time step: embed the word, read the previous state, emit the next
        z = layer.embedding(input=wid, size=word_dim)
        mem = layer.memory(
            name="rnn_state", size=hidden_dim, boot_layer=boot_layer)
        out = layer.fc(input=[y, z, mem],
                       size=hidden_dim,
                       act=activation.Tanh(),
                       bias_attr=True,
                       name="rnn_state")  # shares the memory's name to close the loop
        return out

    out = layer.recurrent_group(
        name="rnn", step=step, input=[emb, data])
    rep = layer.last_seq(input=out)  # final hidden state of the sequence
    prob = layer.fc(size=label_dim,
                    input=rep,
                    act=activation.Softmax(),
                    bias_attr=True)
    cost = layer.classification_cost(input=prob, label=label)
    return str(layer.parse_network(cost))
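The function above relies on module-level imports and hyperparameters that fall outside the excerpt. A minimal setup sketch, assuming the paddle.v2 aliases the names suggest; the dimension values are illustrative, not from the source, and reset_parser's import path varies across Paddle versions, so it is only noted in a comment:

# Assumed setup (not in the original snippet): paddle.v2-style aliases.
import paddle.v2.activation as activation
import paddle.v2.data_type as data_type
import paddle.v2.layer as layer
# reset_parser() clears the config parser's global state between network
# definitions; its import location is version-dependent (assumption).

dict_dim = 10000  # vocabulary size (illustrative)
word_dim = 128    # embedding width (illustrative)
hidden_dim = 256  # recurrent state width (illustrative)
label_dim = 3     # number of classes (illustrative)

print(parse_new_rnn())  # prints the serialized network configuration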
def get_embs(self, input):
    """
    Get embeddings of a token sequence.

    Args:
        input: input sequence of tokens; should be of type
            paddle.v2.data_type.integer_value_sequence.

    Returns:
        The sequence of embeddings.
    """
    embs = layer.embedding(
        input=input, size=self.emb_dim, param_attr=self.emb_param)
    return embs
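A possible call site for get_embs, hedged: `model` stands in for whatever class hosts this method (its emb_dim and emb_param attributes are referenced but not shown here), and dict_dim is the illustrative vocabulary size from the setup sketch above.

# Hypothetical usage: embed an integer token sequence.
# `model` is an assumed instance of the (unshown) class defining get_embs.
word_seq = layer.data(
    name="word_seq",
    type=data_type.integer_value_sequence(dict_dim))  # matches the docstring
embs = model.get_embs(word_seq)  # a sequence of emb_dim-wide vectors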
def parse_new_rnn():
    def new_step(y):
        # Read the previous step's "rnn_state" and compute the next one
        mem = layer.memory(name="rnn_state", size=hidden_dim)
        out = layer.fc(input=[y, mem],
                       size=hidden_dim,
                       act=activation.Tanh(),
                       bias_attr=True,
                       name="rnn_state")
        return out

    data = layer.data(
        name="word", type=data_type.integer_value(dict_dim))
    embd = layer.embedding(input=data, size=word_dim)
    rnn_layer = layer.recurrent_group(
        name="rnn", step=new_step, input=embd)
    return str(layer.parse_network(rnn_layer))
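In this simpler variant (no boot_layer, a single integer-valued input), the recurrence closes through the shared name "rnn_state": layer.memory with that name reads the previous time step's output of the fc layer carrying the same name. A hedged invocation sketch, reusing the illustrative hyperparameters from the setup above:

# The step function implements, per time step t:
#   h_t = tanh(W_y * y_t + W_h * h_{t-1} + b)
# where y_t is the embedded word and h is the "rnn_state" memory.
config_text = parse_new_rnn()          # serialized network description
print(config_text.splitlines()[:5])    # peek at the first few config lines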