def step(y):
    """One step of a simple recurrent unit.

    Reads the previous step's output through a memory layer named
    "rnn_state", then produces the new state with a tanh fc layer that
    writes back under the same name (closing the recurrence).
    """
    prev_state = conf_helps.memory(name="rnn_state", size=hidden_dim)
    new_state = conf_helps.fc_layer(
        input=[y, prev_state],
        size=hidden_dim,
        act=activation.Tanh(),
        bias_attr=True,
        name="rnn_state",
    )
    return new_state
def step(y):
    """Recurrent step: tanh fc over [input, previous state].

    The memory layer and the fc layer share the name "rnn_state", so the
    fc output at step t becomes the memory's value at step t+1.
    """
    state = conf_helps.memory(name="rnn_state", size=hidden_dim)
    combined = [y, state]
    return conf_helps.fc_layer(
        input=combined,
        size=hidden_dim,
        act=activation.Tanh(),
        bias_attr=True,
        name="rnn_state",
    )
def step(y, wid):
    """Recurrent step taking an extra word-id input.

    Embeds ``wid``, reads the previous state from the "rnn_state" memory
    (initialized from ``boot_layer``), and combines everything through a
    tanh fc layer that rewrites "rnn_state".
    """
    word_vec = conf_helps.embedding_layer(input=wid, size=word_dim)
    prev_state = conf_helps.memory(
        name="rnn_state",
        size=hidden_dim,
        boot_layer=boot_layer,
    )
    new_state = conf_helps.fc_layer(
        input=[y, word_vec, prev_state],
        size=hidden_dim,
        act=conf_helps.TanhActivation(),
        bias_attr=True,
        name="rnn_state",
    )
    return new_state
def step(y, wid):
    """Recurrent step with a word embedding and a boot-strapped memory.

    ``wid`` is embedded to ``word_dim``; the "rnn_state" memory (seeded by
    ``boot_layer``) carries the previous fc output; the tanh fc layer
    produces the next state under the same name.
    """
    embedded = conf_helps.embedding_layer(input=wid, size=word_dim)
    state = conf_helps.memory(
        name="rnn_state",
        size=hidden_dim,
        boot_layer=boot_layer,
    )
    fc_inputs = [y, embedded, state]
    return conf_helps.fc_layer(
        input=fc_inputs,
        size=hidden_dim,
        act=conf_helps.TanhActivation(),
        bias_attr=True,
        name="rnn_state",
    )
def to_proto_impl(self, **kwargs):
    """Materialize this deferred memory layer into a config proto.

    Merges call-time ``kwargs`` with the kwargs captured at construction
    (``self.__kwargs__`` wins on key collisions, preserving the original
    update order), resolves a by-name boot layer from the shared context,
    and resolves a lazily-computed ``size`` (a zero-argument callable)
    before delegating to ``conf_helps.memory``.
    """
    # dict(kwargs) + update replaces the original manual key-by-key copy
    # loops; stored kwargs are applied second so they override call-time
    # values, exactly as before.
    args = dict(kwargs)
    args.update(self.__kwargs__)
    # Boot layer was recorded by name; look up the real layer object in
    # the shared context at proto-generation time.
    if self.__boot_layer_name__ is not None:
        args['boot_layer'] = self.__context__[self.__boot_layer_name__]
    # 'size' may be deferred as a callable (size unknown at declaration
    # time); invoke it now. A plain value is left untouched, which is
    # what the original's real_size round-trip amounted to.
    size = args.get('size', None)
    if callable(size):
        args['size'] = size()
    return conf_helps.memory(name=self.name, **args)