def precision_male(y_true, y_pred):
    """Precision for the "male" class (column 0 of the one-hot labels).

    :param y_true: true labels, one-hot: column 0 for male, column 1 for female
    :param y_pred: predicted labels, same layout as ``y_true``
    :return: precision as a backend scalar tensor
    """
    # Samples predicted male (scores rounded to {0, 1} after clipping).
    predicted_male = K.sum(K.round(K.clip(y_pred[:, 0], 0, 1)))
    # True positives: predicted male AND actually male.
    true_positive_male = K.sum(
        K.round(K.clip(y_true[:, 0] * y_pred[:, 0], 0, 1)))
    # Epsilon guards against division by zero when nothing is predicted male.
    return true_positive_male / (predicted_male + K.epsilon())
def recall_female(y_true, y_pred):
    """Recall for the "female" class (column 1 of the one-hot labels).

    :param y_true: true labels, one-hot: column 0 for male, column 1 for female
    :param y_pred: predicted labels, same layout as ``y_true``
    :return: recall as a backend scalar tensor
    """
    # Actual female samples (scores rounded to {0, 1} after clipping).
    nb_female = K.sum(K.round(K.clip(y_true[:, 1], 0, 1)))
    # Fixed copy-paste local name: these are FEMALE true positives
    # (actually female AND predicted female), not male ones.
    female_true_positives = K.sum(
        K.round(K.clip(y_true[:, 1] * y_pred[:, 1], 0, 1)))
    # Epsilon guards against division by zero when no female samples exist.
    recall = female_true_positives / (nb_female + K.epsilon())
    return recall
def get_gradients(self, loss, params):
    """Return gradients of ``loss`` w.r.t. ``params``, with optional clipping.

    Norm clipping (``self.clipnorm``) is applied first, using the global
    norm across all gradients, then value clipping (``self.clipvalue``).
    Either attribute may be absent, in which case that step is skipped.

    :param loss: scalar loss tensor
    :param params: list of parameter tensors
    :return: list of (possibly clipped) gradient tensors
    """
    grads = K.gradients(loss, params)
    clipnorm = getattr(self, 'clipnorm', 0)
    if clipnorm > 0:
        # Global L2 norm over every gradient tensor.
        norm = K.sqrt(sum(K.sum(K.square(g)) for g in grads))
        grads = [clip_norm(g, clipnorm, norm) for g in grads]
    clipvalue = getattr(self, 'clipvalue', 0)
    if clipvalue > 0:
        grads = [K.clip(g, -clipvalue, clipvalue) for g in grads]
    return grads
def kullback_leibler_divergence(y_true, y_pred):
    """KL divergence D(y_true || y_pred), summed over the last axis.

    Both inputs are clipped into [epsilon, 1] so the log and the
    division are numerically safe.
    """
    clipped_true = K.clip(y_true, K.epsilon(), 1)
    clipped_pred = K.clip(y_pred, K.epsilon(), 1)
    ratio = clipped_true / clipped_pred
    return K.sum(clipped_true * K.log(ratio), axis=-1)
def mean_squared_logarithmic_error(y_true, y_pred):
    """Mean squared difference of log(1 + y) terms, over the last axis.

    Inputs are clipped below at epsilon before the log so that values
    at (or numerically below) zero cannot blow up the logarithm.
    """
    log_pred = K.log(1. + K.clip(y_pred, K.epsilon(), None))
    log_true = K.log(1. + K.clip(y_true, K.epsilon(), None))
    return K.mean(K.square(log_pred - log_true), axis=-1)
def mean_absolute_percentage_error(y_true, y_pred):
    """Mean absolute percentage error over the last axis (0-100 scale).

    The denominator |y_true| is clipped below at epsilon so samples
    whose true value is zero do not cause a division by zero.
    """
    denom = K.clip(K.abs(y_true), K.epsilon(), None)
    relative_error = K.abs(y_true - y_pred) / denom
    return 100. * K.mean(relative_error, axis=-1)
def sample_mean_absolute_percentage_error(y_true, y_pred):
    """Symmetric MAPE over the last axis (0-200 scale).

    The error is taken relative to |y_true| + |y_pred|, clipped below
    at epsilon to avoid division by zero when both values are zero.
    """
    denom = K.clip(K.abs(y_true) + K.abs(y_pred), K.epsilon(), None)
    relative_error = K.abs(y_true - y_pred) / denom
    return 200. * K.mean(relative_error, axis=-1)
def __call__(self, w):
    """Rescale ``w`` so its norms move toward [min_value, max_value].

    The per-slice L2 norms (along ``self.axis``) are clipped into the
    allowed band, then blended with the original norms by ``self.rate``:
    rate = 1 enforces the band exactly, smaller rates apply it softly.
    """
    norms = K.sqrt(K.sum(K.square(w), axis=self.axis, keepdims=True))
    clipped = K.clip(norms, self.min_value, self.max_value)
    # Interpolate between the clipped and the original norms.
    target = self.rate * clipped + (1 - self.rate) * norms
    # Epsilon keeps the rescaling factor finite for zero-norm slices.
    scale = target / (K.epsilon() + norms)
    return w * scale
def __call__(self, w):
    """Project ``w`` so each L2 norm along ``self.axis`` is at most
    ``self.max_value``; slices already within the cap are unchanged
    (up to the epsilon in the denominator).
    """
    norms = K.sqrt(K.sum(K.square(w), axis=self.axis, keepdims=True))
    capped = K.clip(norms, 0, self.max_value)
    # Epsilon keeps the rescaling factor finite for zero-norm slices.
    scale = capped / (K.epsilon() + norms)
    return w * scale