def weighted_binary_cross_entropy(output, target, weight, name=''):
    r'''
    Compute the weighted binary cross entropy (aka logistic loss) between
    the ``output`` and the ``target``, scaling each example's contribution
    by ``weight``.

    Example:
        TBA

    Args:
        output: the computed posterior probability from the network
        target: ground-truth label, 0 or 1
        weight: weight of each example
        name (str, optional): the name of the Function instance in the network

    Returns:
        :class:`~cntk.ops.functions.Function`
    '''
    # Local import keeps the C++-binding symbol out of the module namespace;
    # it intentionally shares this wrapper's name (CNTK convention).
    from cntk.cntk_py import weighted_binary_cross_entropy
    # Promote all three operands to a common dtype before handing them to
    # the native op; sanitize_input also wraps raw values as graph nodes.
    dtype = get_data_type(output, target, weight)
    output = sanitize_input(output, dtype)
    target = sanitize_input(target, dtype)
    weight = sanitize_input(weight, dtype)
    return weighted_binary_cross_entropy(output, target, weight, name)