Example #1
def binary_cross_entropy_with_logits(input, target, weight=None, reduction='mean', pos_weight=None):
    # Numerically stable BCE on raw logits:
    # loss = (1 - target) * input + max(-input, 0) + log(exp(-max) + exp(-input - max)),
    # which equals (1 - target) * input + log(1 + exp(-input)) without overflow.
    # `ops` is the element-wise op module of the surrounding backend (not shown here).
    max_val = ops.maximum(-input, 0)

    if pos_weight is not None:
        # pos_weight rescales the positive-class term: the factor is 1 where
        # target == 0 and pos_weight where target == 1.
        log_weight = ((pos_weight - 1) * target) + 1
        loss = (1 - target) * input
        loss_1 = ops.log(ops.exp(-max_val) + ops.exp(-input - max_val)) + max_val
        loss += log_weight * loss_1
    else:
        loss = (1 - target) * input
        loss += max_val
        loss += ops.log(ops.exp(-max_val) + ops.exp(-input - max_val))

    if weight is not None:
        # Optional per-element rescaling weight.
        output = loss * weight
    else:
        output = loss

    if reduction == "mean":
        return ops.reduce_mean(output)
    elif reduction == "sum":
        return ops.reduce_sum(output)
    else:
        return output
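The max_val shift above is the usual log-sum-exp trick: log(1 + exp(-x)) is rewritten as max(-x, 0) + log(exp(-max) + exp(-x - max)) so that large-magnitude logits do not overflow. Below is a minimal cross-check of that identity against the naive sigmoid form, using NumPy as a stand-in backend (the values of x and t are purely illustrative):

import numpy as np

x = np.array([-8.0, -0.5, 0.0, 2.0, 8.0])   # logits
t = np.array([0.0, 1.0, 1.0, 0.0, 1.0])     # binary targets

# Naive form: -(t * log(sigmoid(x)) + (1 - t) * log(1 - sigmoid(x)))
sig = 1.0 / (1.0 + np.exp(-x))
naive = -(t * np.log(sig) + (1 - t) * np.log(1 - sig))

# Stable form used in Example #1: (1 - t) * x + m + log(exp(-m) + exp(-x - m))
m = np.maximum(-x, 0)
stable = (1 - t) * x + m + np.log(np.exp(-m) + np.exp(-x - m))

assert np.allclose(naive, stable)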
Example #2
def threshold(input, threshold):
    # Element-wise lower bound: values below `threshold` are raised to it
    # (note the parameter shadows the function name).
    return ops.maximum(input, threshold)
Example #3
def prelu(input, weight):
    # PReLU: identity for input >= 0, weight * input for input < 0
    # (weight is a learned, possibly per-channel, negative-side slope).
    return ops.maximum(0, input) + weight * ops.minimum(0, input)
Example #4
def leaky_relu(input, negative_slope=0.01):
    # Leaky ReLU: identity for input >= 0, negative_slope * input for input < 0.
    return ops.maximum(0, input) + negative_slope * ops.minimum(0, input)
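Examples #3 and #4 share the same decomposition: max(0, x) keeps the positive part and the scaled min(0, x) supplies the negative-side slope. A quick NumPy sanity check of that identity (stand-in backend, illustrative values):

import numpy as np

x = np.array([-2.0, -0.3, 0.0, 1.5])
w = 0.25  # negative-side slope (scalar weight / negative_slope)
assert np.allclose(np.maximum(0, x) + w * np.minimum(0, x),
                   np.where(x >= 0, x, w * x))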
Example #5
def celu(input, alpha=1.0):
    # CELU: input for input >= 0, alpha * (exp(input / alpha) - 1) for input < 0.
    return ops.maximum(0, input) + ops.minimum(0, alpha * (ops.exp(input / alpha) - 1))
Example #6
def selu(input):
    # SELU: scale * (input for input >= 0, alpha * (exp(input) - 1) for input < 0),
    # with the self-normalizing constants scale ~= 1.0507 and alpha ~= 1.6733.
    return 1.0507009873554804934193349852946 * \
        (ops.maximum(0, input) + ops.minimum(0, 1.6732632423543772848170429916717 * (ops.exp(input) - 1)))
Example #7
def hardtanh(input, min_val=-1.0, max_val=1.0):
    # Hardtanh: clamp input element-wise to the range [min_val, max_val].
    return ops.maximum(ops.minimum(input, max_val), min_val)
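All of the examples above only touch a handful of element-wise calls on `ops`, so they can be exercised standalone with a small NumPy-backed shim. This is a sketch under the assumption that `ops` exposes a MindSpore/TensorFlow-style functional API; only the calls actually used above are provided, and defining the shim in the same module as the functions makes their global `ops` lookups resolve to it:

import types
import numpy as np

# Stand-in for the backend's `ops` module (assumption, not the original backend).
ops = types.SimpleNamespace(
    maximum=np.maximum,
    minimum=np.minimum,
    exp=np.exp,
    log=np.log,
    reduce_mean=np.mean,
    reduce_sum=np.sum,
)

x = np.linspace(-3.0, 3.0, 7)
print(leaky_relu(x))                                    # small slope left of zero
print(hardtanh(x))                                      # clamped to [-1, 1]
print(binary_cross_entropy_with_logits(x, np.ones_like(x)))  # mean-reduced scalar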