Code example #1
File: Train.py Project: MiRA-lab-dev/SynRec
def weighted_binary_crossentropy_loss(y_true, y_pred):
    # Convert the clipped probabilities back to logits so that
    # tf.nn.weighted_cross_entropy_with_logits can be applied.
    _epsilon = tf.convert_to_tensor(epsilon(), y_pred.dtype.base_dtype)
    y_pred_clip = tf.clip_by_value(y_pred, _epsilon, 1 - _epsilon)
    y_pred_logits = tf.log(y_pred_clip / (1 - y_pred_clip))
    # Positive targets are weighted 10x relative to negative ones.
    element_wise_loss = tf.nn.weighted_cross_entropy_with_logits(
        targets=y_true, logits=y_pred_logits, pos_weight=10)
    return K.mean(element_wise_loss, axis=-1)
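For context, a hedged usage sketch follows; the tiny model, its input shape, and the imports are illustrative assumptions, not part of the SynRec project.

import tensorflow as tf
from keras import backend as K
from keras.backend import epsilon
from keras.models import Sequential
from keras.layers import Dense

# Hypothetical usage: compile a small binary classifier with the custom
# loss defined above (weighted_binary_crossentropy_loss).
model = Sequential([Dense(1, activation='sigmoid', input_shape=(16,))])
model.compile(optimizer='adam', loss=weighted_binary_crossentropy_loss)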
Code example #2
 def compute_loss(self, y_true, y_pred):
     # Manual weighted binary crossentropy: w_class1 scales the loss on
     # positive targets, w_class0 the loss on negative targets.
     _epsilon = tf.convert_to_tensor(epsilon(),
                                     dtype=y_pred.dtype.base_dtype)
     y_pred = tf.clip_by_value(y_pred, _epsilon, 1 - _epsilon)
     loss = -self.w_class1 * y_true * tf.log(y_pred) \
            - self.w_class0 * (1 - y_true) * tf.log(1 - y_pred)
     return tf.reduce_mean(loss)
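compute_loss reads self.w_class0 and self.w_class1, so it presumably lives on a small host class; a minimal hypothetical sketch (attribute names from the snippet, default values invented):

class WeightedBCELoss:
    # Hypothetical host class for compute_loss above; w_class0 weights
    # the negative class, w_class1 the positive class.
    def __init__(self, w_class0=1.0, w_class1=10.0):
        self.w_class0 = w_class0
        self.w_class1 = w_class1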
Code example #3
def binary_crossentropy_weight(target, output, weight, from_logits=False):
    # `weight` scales the loss on positive targets (pos_weight).
    if not from_logits:
        # transform back to logits
        _epsilon = tf.convert_to_tensor(epsilon(), output.dtype.base_dtype)
        output = tf.clip_by_value(output, _epsilon, 1 - _epsilon)
        output = tf.log(output / (1 - output))
    return tf.nn.weighted_cross_entropy_with_logits(targets=target,
                                                    logits=output,
                                                    pos_weight=weight)
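Since Keras calls a loss as loss(y_true, y_pred), the extra weight argument has to be bound first; a hypothetical way to do that:

import functools

# Bind weight up front so the result matches Keras' two-argument
# loss signature (the value 5.0 is an arbitrary example).
loss_fn = functools.partial(binary_crossentropy_weight, weight=5.0)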
Code example #4
File: loss.py Project: CVaranese/SteveTheBot
def entropy_categorical_crossentropy(target, output):
    # Scale predictions so the class probabilities of each sample sum to 1.
    output /= tf.reduce_sum(output,
                            axis=len(output.get_shape()) - 1,
                            keep_dims=True)

    _epsilon = _to_tensor(epsilon(), output.dtype.base_dtype)
    output = tf.clip_by_value(output, _epsilon, 1. - _epsilon)
    # The -.001*output term subtracts a small entropy bonus from the
    # crossentropy, encouraging less confident predictions.
    return - tf.reduce_sum((target - .001*output) * tf.log(output),
                           axis=len(output.get_shape()) - 1)
Code example #5
def cat_cross_inv(y_true, y_pred):
    # Like normal categorical crossentropy, but with the labels reversed,
    # so this measures how wrong the network was.
    y_true = 1 - y_true
    axis = -1
    y_pred /= tf.reduce_sum(y_pred, axis, True)
    # manual computation of crossentropy
    _epsilon = _to_tensor(epsilon(), y_pred.dtype.base_dtype)
    y_pred = tf.clip_by_value(y_pred, _epsilon, 1. - _epsilon)
    return -tf.reduce_sum(y_true * tf.log(y_pred), axis)
Code example #6
File: __init__.py Project: whztt07/plaidml
def categorical_crossentropy(target, output, from_logits=False):
    if from_logits:
        output = softmax(output)
    elif output.opname != 'softmax':
        output /= sum(output, axis=(-1,), keepdims=True)
        output = clip(output, epsilon(), 1.0 - epsilon())
    T = target.tensor
    O = output.tensor
    ndims = O.shape.ndims
    fixed_dims = edsl.TensorDims(ndims - 1)
    fixed_idxs = edsl.TensorIndexes(ndims - 1)
    Y = edsl.TensorDim()
    y = edsl.TensorIndex()
    input_dims = fixed_dims + [Y]
    O.bind_dims(*input_dims)
    T.bind_dims(*input_dims)
    LO = edsl.log(O)
    # Contract target * log(output) over the class dimension y.
    TR = edsl.TensorOutput(*fixed_dims)
    TR[fixed_idxs] += T[fixed_idxs + [y]] * LO[fixed_idxs + [y]]
    R = -TR
    return _KerasNode('categorical_crossentropy', tensor=R)
Code example #7
def weighted_binary_crossentropy(target, output, weights, from_logits=False):
    from keras.backend.common import epsilon

    # Note: tf.nn.sigmoid_cross_entropy_with_logits
    #  expects logits, Keras expects probabilities.
    if not from_logits:
        # transform back to logits
        _epsilon = tf.convert_to_tensor(epsilon(), output.dtype.base_dtype)
        output = tf.clip_by_value(output, _epsilon, 1 - _epsilon)
        output = tf.log(output / (1 - output))

    return tf.nn.weighted_cross_entropy_with_logits(targets=target,
                                                    logits=output,
                                                    pos_weight=weights)
Code example #8
File: losses.py Project: prhbrt/yeast-cell-detection
def auto_weighting_binary_crossentropy(target, output, from_logits=False):
    if not from_logits:
        _epsilon = _to_tensor(epsilon(), output.dtype.base_dtype)
        output = tf.clip_by_value(output, _epsilon, 1 - _epsilon)
        output = tf.math.log(output / (1 - output))

    # Keep every positive pixel plus a random ~2% of the remaining ones.
    subsample = tf.dtypes.cast(tf.math.logical_or(
        tf.less(K.random_uniform(K.shape(target), minval=0, maxval=1), 0.02),
        tf.greater(target, 0.5)
    ), tf.float32)

    r = tf.nn.sigmoid_cross_entropy_with_logits(
        labels=target, logits=output)
    # Rescale so the subsampled loss keeps a comparable overall magnitude.
    return r * subsample / 0.04
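A hedged NumPy sanity check of the subsampling idea (shapes and the random seed are invented): the mask keeps every positive pixel plus roughly 2% of the rest.

import numpy as np

rng = np.random.default_rng(0)
target = (rng.random((4, 64, 64, 1)) > 0.9).astype(np.float32)
mask = np.logical_or(rng.random(target.shape) < 0.02, target > 0.5)
print(mask.mean())  # approximately 0.02 + the fraction of positive pixels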
Code example #9
def custom_categorical_crossentropy(target,
                                    output,
                                    from_logits=False,
                                    delta=1e-7):
    if not from_logits:
        # Scale predictions so the class probabilities sum to 1, then
        # clip; `delta` further guards the log against zero inputs.
        output /= tf.reduce_sum(output,
                                axis=len(output.get_shape()) - 1,
                                keep_dims=True)
        _epsilon = _to_tensor(epsilon(), output.dtype.base_dtype)
        output = tf.clip_by_value(output, _epsilon, 1. - _epsilon)
        return -tf.reduce_sum(target * tf.log(output + delta),
                              axis=len(output.get_shape()) - 1)
    else:
        return tf.nn.softmax_cross_entropy_with_logits(labels=target,
                                                       logits=output)
Code example #10
File: losses.py Project: prhbrt/yeast-cell-detection
def normalized_binary_crossentropy(target, output, from_logits=False):
    if not from_logits:
        _epsilon = _to_tensor(epsilon(), output.dtype.base_dtype)
        output = tf.clip_by_value(output, _epsilon, 1 - _epsilon)
        output = tf.math.log(output / (1 - output))

    # w0 = number of positive pixels, w1 = number of negative pixels,
    # normalized below so that w0^2 + w1^2 == 1.
    w0 = tf.dtypes.cast(K.sum(target), tf.float32)
    s = K.shape(target)
    w1 = tf.dtypes.cast(s[0] * s[1] * s[2] * s[3], tf.float32) - w0
    r = K.sqrt(w0*w0 + w1*w1)
    w0, w1 = w0 / r, w1 / r
    # Weight each pixel by the normalized count of the opposite class.
    target2 = tf.dtypes.cast(target, tf.float32)
    w = w0 * (1. - target2) + w1 * target2

    r = tf.nn.sigmoid_cross_entropy_with_logits(
        labels=target, logits=output)
    return w * r
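A quick hedged check of the normalization used above (the counts are invented): the two class counts are scaled to lie on the unit circle, so w0^2 + w1^2 == 1.

import numpy as np

w0, w1 = 100.0, 900.0          # e.g. positive and negative pixel counts
r = np.sqrt(w0 * w0 + w1 * w1)
w0, w1 = w0 / r, w1 / r
print(w0, w1, w0 ** 2 + w1 ** 2)  # the last value is 1.0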
Code example #11
    def iou(target, output):
        # Assumes target/output of shape (batch, pixels, 2), with
        # channel 1 holding the foreground class.
        output /= tf.reduce_sum(output, len(output.get_shape()) - 1, True)
        # Intersection over the foreground channel only.
        intersection = tf.reduce_sum(target * output, 1)
        intersection = intersection * tf.constant([0.0, 1.0])
        intersection = tf.reduce_sum(intersection, 1)

        den1 = tf.reduce_sum(target, 1) * tf.constant([0.0, 1.0])
        den2 = tf.reduce_sum(output, 1) * tf.constant([0.0, 1.0])
        den1 = tf.reduce_sum(den1, 1)
        den2 = tf.reduce_sum(den2, 1)
        score1 = intersection / (den1 + den2 - intersection + epsilon())

        # If a sample has no foreground, score its background channel instead.
        back_grd = 1.0 - K.clip(den1, 0, 1)
        score2 = tf.reduce_mean(output, 1) * tf.constant([1.0, 0.0])
        score2 = tf.reduce_sum(score2, 1)
        score2 = score2 * back_grd

        return score1 + score2
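A simplified, hypothetical NumPy illustration of the foreground IoU computed above, for a single flattened image with two one-hot channels (values invented):

import numpy as np

target = np.array([[0.0, 1.0], [1.0, 0.0], [0.0, 1.0]])  # (pixels, 2)
output = np.array([[0.1, 0.9], [0.8, 0.2], [0.4, 0.6]])
inter = (target[:, 1] * output[:, 1]).sum()
union = target[:, 1].sum() + output[:, 1].sum() - inter
print(inter / (union + 1e-7))  # IoU of the foreground channel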
Code example #12
def ORIGINAL_categorical_crossentropy(target,
                                      output,
                                      from_logits=False,
                                      axis=-1):
    """Categorical crossentropy between an output tensor and a target tensor.
    # Arguments
        target: A tensor of the same shape as `output`.
        output: A tensor resulting from a softmax
            (unless `from_logits` is True, in which
            case `output` is expected to be the logits).
        from_logits: Boolean, whether `output` is the
            result of a softmax, or is a tensor of logits.
        axis: Int specifying the channels axis. `axis=-1`
            corresponds to data format `channels_last`,
            and `axis=1` corresponds to data format
            `channels_first`.
    # Returns
        Output tensor.
    # Raises
        ValueError: if `axis` is neither -1 nor one of
            the axes of `output`.
    """
    output_dimensions = list(range(len(output.get_shape())))
    if axis != -1 and axis not in output_dimensions:
        raise ValueError('{}{}{}'.format(
            'Unexpected channels axis {}. '.format(axis),
            'Expected to be -1 or one of the axes of `output`, ',
            'which has {} dimensions.'.format(len(output.get_shape()))))
    # Note: tf.nn.softmax_cross_entropy_with_logits
    # expects logits, Keras expects probabilities.
    if not from_logits:
        # scale preds so that the class probas of each sample sum to 1
        output /= tf.reduce_sum(output, axis, True)
        # manual computation of crossentropy
        _epsilon = tf.convert_to_tensor(epsilon(),
                                        dtype=output.dtype.base_dtype)
        output = tf.clip_by_value(output, _epsilon, 1. - _epsilon)
        losses = target * tf.log(output)
        #weighted_losses = target * tf.log(output) * weights
        return -tf.reduce_sum(losses, axis)
    else:
        return tf.nn.softmax_cross_entropy_with_logits(labels=target,
                                                       logits=output)
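A hedged NumPy cross-check of the two branches above (the logits are invented): the manual probability path agrees with softmax crossentropy on logits.

import numpy as np

logits = np.array([[2.0, 1.0, 0.1]])
target = np.array([[1.0, 0.0, 0.0]])
probs = np.exp(logits) / np.exp(logits).sum(axis=-1, keepdims=True)
manual = -(target * np.log(probs)).sum(axis=-1)
print(manual)  # matches tf.nn.softmax_cross_entropy_with_logits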
Code example #13
File: my_losses.py Project: boti996/onlab-public
def weighted_binary_crossentropy(y_true, y_pred):
    # `white_idx` and `rate` are module-level settings defined elsewhere
    # in my_losses.py (the white-class channel index and a weighting rate).
    # scale preds so that the class probas of each sample sum to 1
    y_pred /= tf.reduce_sum(y_pred, len(y_pred.get_shape()) - 1, True)
    # manual computation of crossentropy
    _epsilon = tf.convert_to_tensor(epsilon(), dtype=y_pred.dtype.base_dtype)
    y_pred = tf.clip_by_value(y_pred, _epsilon, 1. - _epsilon)

    axis = len(y_pred.get_shape()) - 1

    white_true = tf.gather(params=y_true, indices=[white_idx], axis=axis)
    black_true = tf.gather(params=y_true, indices=[1 - white_idx], axis=axis)

    white_pred = tf.gather(params=y_pred, indices=[white_idx], axis=axis)
    black_pred = tf.gather(params=y_pred, indices=[1 - white_idx], axis=axis)

    return -tf.reduce_sum(
        (black_true * tf.log(black_pred) + white_true * tf.log(white_pred) *
         (rate - 1)) / rate,
        axis=axis)
Code example #14
 def categorical_crossentropy_regularized(target, output):
     """Categorical crossentropy between an output tensor and a target
     tensor, plus an entropy regularization term scaled by `entropy_beta`
     (taken from the enclosing scope).
     # Arguments
         target: A tensor of the same shape as `output`.
         output: A tensor resulting from a softmax.
     # Returns
         Output tensor.
     """
     # scale preds so that the class probas of each sample sum to 1
     output /= tf.reduce_sum(output, len(output.get_shape()) - 1, True)
     # manual computation of crossentropy
     _epsilon = tf.convert_to_tensor(epsilon(),
                                     dtype=output.dtype.base_dtype)
     output = tf.clip_by_value(output, _epsilon, 1. - _epsilon)
     return -tf.reduce_sum(
         target * tf.log(output) -
         entropy_beta * output * tf.log(output),
         len(output.get_shape()) - 1)
Code example #15
def logit(inputs):
    # Inverse of the sigmoid: maps probabilities in (0, 1) to logits.
    _epsilon = _to_tensor(epsilon(), inputs.dtype.base_dtype)
    inputs = tf.clip_by_value(inputs, _epsilon, 1 - _epsilon)
    inputs = tf.log(inputs / (1 - inputs))
    return inputs
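A small hedged round-trip check (the probabilities are invented): logit is the inverse of the sigmoid.

import numpy as np

p = np.array([0.1, 0.5, 0.9])
logits = np.log(p / (1 - p))
print(1.0 / (1.0 + np.exp(-logits)))  # recovers [0.1, 0.5, 0.9]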
Code example #16
def eps():
    # Thin alias for the Keras backend fuzz factor.
    return epsilon()
Code example #17
File: __init__.py Project: whztt07/plaidml
def binary_crossentropy(target, output, from_logits=False):
    # PlaidML backend: the fuzz factor is passed into the op directly.
    if from_logits:
        output = sigmoid(output)
    return _KerasNode('binary_crossentropy',
                      tensor=plaidml_op.binary_crossentropy(target.tensor, output.tensor,
                                                            epsilon()))
Code example #18
File: Train.py Project: MiRA-lab-dev/SynRec
def binary_crossentropy_loss(y_true, y_pred):
    # Unweighted manual binary crossentropy, averaged over the last axis.
    _epsilon = tf.convert_to_tensor(epsilon(), y_pred.dtype.base_dtype)
    y_pred_clip = tf.clip_by_value(y_pred, _epsilon, 1 - _epsilon)
    y1 = -1 * y_true * K.log(y_pred_clip)
    y2 = -1 * (1 - y_true) * K.log(1 - y_pred_clip)
    return K.mean(y1 + y2, axis=-1)
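A final hedged NumPy check (values invented): the manual formula above matches the textbook binary crossentropy.

import numpy as np

y_true = np.array([1.0, 0.0, 1.0])
y_pred = np.clip(np.array([0.9, 0.2, 0.6]), 1e-7, 1 - 1e-7)
loss = -(y_true * np.log(y_pred) + (1 - y_true) * np.log(1 - y_pred))
print(loss.mean())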