Code example #1
    def Gelu(self, x):
        '''Implementation of the GELU activation function.

        GELU(x) = x * Phi(x), where Phi is the standard normal CDF.

        Arguments:
            x (tensor):
                Input tensor.
        Returns:
            Tensor, output of the GELU activation function.
        '''
        # Assumes a Normal distribution class with a cdf() method is in scope,
        # e.g. tfp.distributions.Normal or torch.distributions.Normal.
        normal = Normal(loc=0., scale=1.)
        return x * normal.cdf(x)
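
A minimal usage sketch, assuming TensorFlow Probability supplies the Normal class; tf.nn.gelu is used only as a reference check, and neither name appears in the original snippet:

import tensorflow as tf
import tensorflow_probability as tfp

x = tf.constant([-1.0, 0.0, 1.0])
normal = tfp.distributions.Normal(loc=0., scale=1.)
gelu = x * normal.cdf(x)                      # exact GELU: x * Phi(x)
reference = tf.nn.gelu(x, approximate=False)  # TF's built-in exact GELU
print(gelu.numpy())   # matches reference.numpy() up to float precision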
Code example #2
def probit(x):
    '''Standard normal CDF Phi(x), the probit model's inverse link.'''
    # Assumes e.g.: from tensorflow_probability.python.distributions import Normal as TFNormal
    normal = TFNormal(loc=0., scale=1.)
    return normal.cdf(x)
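
A short usage sketch, again assuming TensorFlow Probability; note that in statistics "probit" usually denotes the inverse CDF Phi^-1 (available here as Normal.quantile), while this function computes the forward CDF Phi:

import tensorflow as tf
import tensorflow_probability as tfp

scores = tf.constant([-2.0, 0.0, 2.0])
normal = tfp.distributions.Normal(loc=0., scale=1.)
probs = normal.cdf(scores)          # Phi: real scores -> probabilities in (0, 1)
recovered = normal.quantile(probs)  # Phi^-1 (the textbook probit) inverts it
print(probs.numpy())      # ~[0.0228, 0.5, 0.9772]
print(recovered.numpy())  # ~[-2.0, 0.0, 2.0]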