Example #1
from paddle.fluid import layers, unique_name
from paddle.fluid.dygraph import Layer
from paddle.fluid.dygraph.layer_object_helper import LayerObjectHelper


class BasicLSTMUnit(Layer):
    def __init__(self,
                 name_scope,
                 hidden_size,
                 param_attr=None,
                 bias_attr=None,
                 gate_activation=None,
                 activation=None,
                 forget_bias=1.0,
                 dtype='float32'):
        super(BasicLSTMUnit, self).__init__(name_scope, dtype)
        # Keep the old-style _full_name and _helper so static-graph
        # save/load keeps working.
        self._full_name = unique_name.generate(name_scope + "/" +
                                               self.__class__.__name__)
        self._helper = LayerObjectHelper(self._full_name)

        self._name = name_scope
        self._hidden_size = hidden_size
        self._param_attr = param_attr
        self._bias_attr = bias_attr
        # Fall back to sigmoid/tanh when no activations are supplied.
        self._gate_activation = gate_activation or layers.sigmoid
        self._activation = activation or layers.tanh
        self._forget_bias = layers.fill_constant([1],
                                                 dtype=dtype,
                                                 value=forget_bias)
        self._forget_bias.stop_gradient = False
        self._dtype = dtype
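For reference, a minimal sketch of what the `unique_name.generate` call above does (assuming a fluid-era PaddlePaddle install; the "lstm" scope name is just an illustration): each call appends a fresh counter suffix, so two units built under the same name_scope get distinct `_full_name` values and therefore distinct helpers.

from paddle.fluid import unique_name

# Repeated generation under the same scope yields distinct full names.
print(unique_name.generate("lstm/BasicLSTMUnit"))  # e.g. lstm/BasicLSTMUnit_0
print(unique_name.generate("lstm/BasicLSTMUnit"))  # e.g. lstm/BasicLSTMUnit_1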
Example #2
import unittest

import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.layer_object_helper import LayerObjectHelper


class TestAppendActivation(unittest.TestCase):  # hypothetical wrapper so the excerpted method runs
    def func_append_activation_in_dygraph3(self):
        a_np = np.random.random(size=(10, 20, 30)).astype(np.float32)
        helper = LayerObjectHelper(fluid.unique_name.generate("test"))
        func = helper.append_activation
        with fluid.dygraph.guard():
            a = paddle.to_tensor(a_np)
            # The helper-applied sigmoid must match the direct op.
            res1 = func(a, act="sigmoid", use_cudnn=True)
            res2 = fluid.layers.sigmoid(a)
            self.assertTrue(np.array_equal(res1.numpy(), res2.numpy()))
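As a follow-up, a minimal sketch of the same `append_activation` dispatch with a different activation name (again assuming a fluid-era PaddlePaddle install; the "demo" scope name is just an illustration):

import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.layer_object_helper import LayerObjectHelper

helper = LayerObjectHelper(fluid.unique_name.generate("demo"))
with fluid.dygraph.guard():
    x = paddle.to_tensor(np.array([-1.0, 0.0, 2.0], dtype=np.float32))
    # The act string selects which activation op gets appended, here ReLU.
    y = helper.append_activation(x, act="relu")
    print(y.numpy())  # expected: [0. 0. 2.]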