def get_config(self):
    """Return the serializable config of this AUC metric.

    Tensor/variable label weights are materialized via `K.eval` so the
    resulting config contains only plain Python values.
    """
    # Materialize label_weights if it is a tensor/variable; otherwise
    # pass the plain value through unchanged.
    label_weights = (
        K.eval(self.label_weights)
        if is_tensor_or_variable(self.label_weights)
        else self.label_weights)

    # Drop the two endpoint thresholds, mirroring how __init__ added
    # them, so that rebuilding a metric from this config reproduces
    # identical thresholds.
    config = {
        'num_thresholds': self.num_thresholds,
        'curve': self.curve.value,
        'summation_method': self.summation_method.value,
        'thresholds': self.thresholds[1:-1],
        'multi_label': self.multi_label,
        'label_weights': label_weights
    }
    base_config = super(AUC, self).get_config()
    # Merge base config with this class's config; entries from `config`
    # win on key collisions (same semantics as the usual Keras pattern).
    merged = list(base_config.items()) + list(config.items())
    return dict(merged)
def get_config(self):
    """Return the serializable config of this loss wrapper.

    Any tensor/variable values held in `self._fn_kwargs` are evaluated
    with `K.eval` so the config contains only plain Python values.
    """
    # Evaluate tensor-valued kwargs; leave plain values untouched.
    config = {
        key: K.eval(value) if tf_utils.is_tensor_or_variable(value) else value
        for key, value in six.iteritems(self._fn_kwargs)
    }
    base_config = super(LossFunctionWrapper, self).get_config()
    # Merge with the base config; wrapper kwargs win on key collisions.
    return dict(list(base_config.items()) + list(config.items()))
def get_config(self):
    """Return the serializable config of this loss wrapper.

    Tensor/variable entries in `self._fn_kwargs` are converted to plain
    Python values via `K.eval` before being placed in the config.
    """
    # Evaluate tensor-valued kwargs; plain values pass through as-is.
    config = {
        key: K.eval(value) if is_tensor_or_variable(value) else value
        for key, value in six.iteritems(self._fn_kwargs)
    }
    base_config = super(LossFunctionWrapper, self).get_config()
    # Merge with the base config; wrapper kwargs win on key collisions.
    return dict(list(base_config.items()) + list(config.items()))
def get_config(self):
    """Return the serializable config of this metric wrapper.

    Tensor/variable entries in `self._fn_kwargs` are converted to plain
    Python values via `K.eval` before being placed in the config.
    """
    # Evaluate tensor-valued kwargs; plain values pass through as-is.
    config = {
        key: K.eval(value) if is_tensor_or_variable(value) else value
        for key, value in six.iteritems(self._fn_kwargs)
    }
    base_config = super(CustomMeanMetricWrapper, self).get_config()
    # Merge with the base config; wrapper kwargs win on key collisions.
    return dict(list(base_config.items()) + list(config.items()))