def loss_func(y_true, y_pred_mll):
    """Binary cross-entropy plus penalties on the auxiliary m_ll output.

    Column 0 of both tensors carries the classification target / score;
    column 1 of ``y_pred_mll`` carries the predicted m_ll value.  The
    auxiliary terms pull the batch of m_ll predictions toward a mean of
    91.2 and a standard deviation of 7.67 (presumably the Z-boson mass
    and width in GeV — confirm with the physics context).  Both terms
    are weighted by the module-level constant ``c``.
    """
    labels = y_true[:, 0]
    scores = y_pred_mll[:, 0]
    mll = y_pred_mll[:, 1]
    # Mean of |m_ll - 91.2| over the batch.
    mean_term = K.mean(K.abs(mll - 91.2))
    # Deviation of the batch spread from the target width 7.67.
    sigma_term = K.abs(K.std(mll) - 7.67)
    return binary_crossentropy(labels, scores) + c * mean_term + c * sigma_term
def _compute_cost_huber(self, q, a, r, t, q2):
    """Huber (smooth-L1) temporal-difference cost for Q-learning.

    Args:
        q:  Q-values for the taken actions' states.
        a:  action indices used to slice ``q`` (via slice_tensor_tensor).
        r:  rewards.
        t:  terminal flags (1 at episode end, masking the bootstrap).
        q2: Q-values of the successor states.

    Returns:
        Scalar mean Huber cost over the batch.
    """
    preds = slice_tensor_tensor(q, a)
    # Bootstrap with max (standard Q-learning) or mean, per configuration.
    bootstrap = K.max if not self.use_mean else K.mean
    targets = r + (1 - t) * self.gamma * bootstrap(q2, axis=1)
    err = targets - preds
    cond = K.abs(err) > 1.0
    L2 = 0.5 * K.square(err)
    L1 = (K.abs(err) - 0.5)
    # BUG FIX: the branches were swapped.  Huber loss is quadratic for
    # small errors and LINEAR for |err| > 1; the original selected the
    # quadratic term for large errors, leaving the gradient unbounded
    # exactly where clipping is supposed to apply.
    cost = tf.where(cond, L1, L2)
    return K.mean(cost)
def custom_mean_squared_loss(y_true, y_pred):
    """MSE with wrap-around handling for angular components.

    Features at index >= 6 along the last axis are treated as angles in
    degrees: their error is the shorter way around the circle,
    min(|d|, 360 - |d|), so e.g. 359 vs 1 yields an error of 2 rather
    than 358.  The first 6 features use the plain absolute difference.

    Returns:
        Mean of squared errors over the last axis (per batch element,
        per time step — assumes rank-3 inputs; TODO confirm shapes).
    """
    # Removed leftover debug prints: they executed only once at
    # graph-construction time and just polluted stdout.
    diff = K.abs(y_true - y_pred)
    # Wrap angular differences onto [0, 180].
    angle_diff = K.minimum(diff[:, :, 6:], 360 - diff[:, :, 6:])
    error = tf.concat([diff[:, :, :6], angle_diff], axis=-1)
    return K.mean(K.square(error), axis=-1)
def confidence_reconstruction_loss(y_true, y_pred, mask, num_steps, gaussian_kernel_size, gaussian_kernel_std):
    """Weighted L1 reconstruction loss for masked-image inpainting.

    Pixels outside the mask contribute with weight 1; pixels inside it
    contribute through a Gaussian-blurred copy of the mask (produced by
    ``gaussian_utils.blur_mask``), softening the loss near hole borders.

    Returns:
        Per-sample loss, averaged over the trailing three axes
        (assumed H, W, C — TODO confirm layout).
    """
    blurred = gaussian_utils.blur_mask(
        mask, num_steps, gaussian_kernel_size, gaussian_kernel_std)
    outside = 1 - mask
    abs_err = K.abs(y_true - y_pred)
    weighted = abs_err * outside + abs_err * blurred
    return K.mean(weighted, axis=[1, 2, 3])
def loss_func(y_true, y_pred_mll):
    """Binary cross-entropy plus an m_ll regularisation term.

    Column 0 of both tensors carries the classification target / score;
    column 1 of ``y_pred_mll`` carries the predicted m_ll value.  The
    auxiliary term pulls the batch-mean of |m_ll - 91.2| toward zero
    (91.2 is presumably the Z-boson mass in GeV — confirm with the
    physics context), weighted by the module-level constant ``c``.
    """
    y_true = y_true[:, 0]
    y_pred = y_pred_mll[:, 0]
    mll_pred = y_pred_mll[:, 1]
    mll_loss = K.mean(K.abs(mll_pred - 91.2))
    # Removed commented-out experimental code (random pseudomll target);
    # it was dead weight and confused the intent of this loss.
    return binary_crossentropy(y_true, y_pred) + c * mll_loss
def reconstruction_loss(y_true, y_pred):
    """Plain L1 reconstruction loss.

    Returns:
        Per-sample mean absolute error, averaged over the trailing three
        axes (assumed H, W, C — TODO confirm layout).
    """
    abs_err = K.abs(y_pred - y_true)
    return K.mean(abs_err, axis=[1, 2, 3])
def jaccard(x):
    """Tanimoto-style similarity kernel over a pair of tensors.

    ``x`` is a pair (x[0], x[1]).  The denominator is the Tanimoto
    denominator sum(a^2) + sum(b^2) - sum(|a*b|) along axis 1
    (keepdims), broadcast against the elementwise product a*b.

    NOTE(review): the numerator is NOT summed, so the result is
    elementwise rather than a scalar Jaccard coefficient per row —
    confirm this is intended by the caller.
    """
    a, b = x[0], x[1]
    prod = a * b
    denom = (K.sum(a ** 2, axis=1, keepdims=True)
             + K.sum(b ** 2, axis=1, keepdims=True)
             - K.sum(K.abs(prod), axis=1, keepdims=True))
    return prod / denom