Example #1
def dropout(x, keep_prob, name=None):
    """
    Computes dropout.

      With probability `keep_prob`, outputs the input element scaled up by
      `1 / keep_prob`, otherwise outputs `0`.  The scaling is so that the expected
      sum is unchanged.

      Args:
        x: A tensor.
        keep_prob: A float. The probability that each element is kept.
        name: A name for this operation (optional).

      Returns:
        A Tensor of the same shape of `x`.
    """

    return ops.Dropout(x, 1 - keep_prob)
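To illustrate the scaling described in the docstring, here is a minimal NumPy sketch of inverted dropout (a reference illustration only, not Dragon code; `dropout_reference` is a hypothetical helper, and `ops.Dropout` is assumed to implement equivalent semantics):

import numpy as np

def dropout_reference(x, keep_prob, rng=np.random):
    # Keep each element with probability `keep_prob` and scale kept values
    # by 1 / keep_prob, so the expected value of each output element equals
    # the corresponding input element.
    mask = rng.uniform(size=x.shape) < keep_prob
    return np.where(mask, x / keep_prob, 0.0)

x = np.ones((1000, 1000))
y = dropout_reference(x, keep_prob=0.5)
# About half the elements are zeroed and the rest become 2.0,
# so the mean stays close to 1.0, i.e. the expected sum is unchanged.
print(y.mean())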
Example #2
def Setup(self, bottom):
    super(DropoutLayer, self).Setup(bottom)
    input = bottom[0] if isinstance(bottom, list) else bottom
    return ops.Dropout(input, **self._param)
Example #3
File: nn_ops.py  Project: k9sret/Dragon
def dropout(x, keep_prob, name=None):
    return ops.Dropout(x, 1 - keep_prob)
Example #4
File: nn_ops.py  Project: yyaqi/Dragon
def dropout(x, keep_prob, name=None):
    return _ops.Dropout(x, 1. - keep_prob, name=name)
Example #5
File: neuron.py  Project: yyaqi/Dragon
def LayerSetup(self, bottom):
    return _ops.Dropout(bottom, **self.arguments)