Example #1
import numpy as np
# Assumption: 'GAP' is an alias for the YouTube-8M average precision helper
# module used by the other examples on this page.
import average_precision_calculator as GAP


def metriccalculation(predictions, Y_validation, numpos=None):
    """
    Calculates the global average precision (GAP) between the predictions and
    Y_validation arrays.

    :param predictions: Array of model scores, one row per validation example.
    :param Y_validation: Array of ground-truth labels, same shape as predictions.
    :param numpos: In case the Y_validation array is not complete, this parameter
        gives the true number of positive labels for each example.
    :return: The score given by GAP.
    """
    predictions = np.array(predictions)
    Y_validation = np.array(Y_validation)

    if predictions.shape != Y_validation.shape:
        raise ValueError(
            "Different shapes between 'predictions' and 'Y_validation'")
    valcases = len(Y_validation)
    # The calculator keeps at most 20 predictions per validation example.
    gap = GAP.AveragePrecisionCalculator(20 * valcases)

    for i in range(valcases):
        # Sort each row by descending prediction score.
        p = predictions[i].argsort()[::-1]
        predictions[i] = predictions[i][p]
        Y_validation[i] = Y_validation[i][p]
        if numpos is None:
            gap.accumulate(predictions[i][:20],
                           Y_validation[i][:20],
                           num_positives=np.sum(Y_validation[i]))
        else:
            gap.accumulate(predictions[i][:20],
                           Y_validation[i][:20],
                           num_positives=numpos[i])

    return gap.peek_ap_at_n()
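
A minimal usage sketch for the function above, assuming numpy and the average_precision_calculator module (imported as GAP, as in the snippet) are available; the toy data is made up for illustration:

import numpy as np

# Toy batch: 3 validation examples, 25 classes, multi-hot ground truth.
rng = np.random.default_rng(0)
toy_predictions = rng.random((3, 25))
toy_labels = (rng.random((3, 25)) > 0.8).astype(np.float32)

print("GAP:", metriccalculation(toy_predictions, toy_labels))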
Example #2
def __init__(self, num_class, top_k):
  self.sum_hit_at_one = 0.0
  self.sum_perr = 0.0
  self.sum_loss = 0.0
  self.map_calculator = map_calculator.MeanAveragePrecisionCalculator(num_class)
  self.global_ap_calculator = ap_calculator.AveragePrecisionCalculator()
  self.top_k = top_k
  self.num_examples = 0
Example #3
  def __init__(self, num_class):
    
    if not isinstance(num_class, int) or num_class <= 1:
      raise ValueError("num_class must be a positive integer.")

    self._ap_calculators = []  # one AveragePrecisionCalculator per class
    self._num_class = num_class  # total number of classes
    for i in range(num_class):
      self._ap_calculators.append(
          average_precision_calculator.AveragePrecisionCalculator())
Example #4
def calculate_gap(predictions, actuals, top_k=20):
  """Performs a local (numpy) calculation of the global average precision.
  Only the top_k predictions are taken for each of the videos.
  Args:
    predictions: Matrix containing the outputs of the model.
      Dimensions are 'batch' x 'num_classes'.
    actuals: Matrix containing the ground truth labels.
      Dimensions are 'batch' x 'num_classes'.
    top_k: How many predictions to use per video.
  Returns:
    float: The global average precision.
  """
  gap_calculator = ap_calculator.AveragePrecisionCalculator()
  sparse_predictions, sparse_labels, num_positives = top_k_by_class(predictions, actuals, top_k)
  gap_calculator.accumulate(flatten(sparse_predictions), flatten(sparse_labels), sum(num_positives))
  return gap_calculator.peek_ap_at_n()
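
For comparison, the same metric can be computed directly with numpy, without the calculator or the top_k_by_class/flatten helpers. This is an independent sketch of the global average precision as defined above (pool the top_k scored (prediction, label) pairs of every video into one ranked list, compute average precision over it, and divide by the total number of positives); up to tie-breaking it should agree with calculate_gap.

import numpy as np

def gap_numpy(predictions, actuals, top_k=20):
  """Pure-numpy reference sketch of the global average precision."""
  predictions = np.asarray(predictions, dtype=np.float64)
  actuals = np.asarray(actuals, dtype=np.float64)

  # Keep only the top_k scores of each video (row).
  k = min(top_k, predictions.shape[1])
  top_idx = np.argsort(-predictions, axis=1)[:, :k]
  rows = np.arange(predictions.shape[0])[:, None]
  scores = predictions[rows, top_idx].ravel()
  labels = actuals[rows, top_idx].ravel()

  # All positives count in the denominator, even those cut off by top_k.
  num_positives = actuals.sum()
  if num_positives == 0:
    return 0.0

  # One global ranked list over all retained (score, label) pairs.
  order = np.argsort(-scores)
  labels = labels[order]
  hits = np.cumsum(labels)
  precisions = hits / np.arange(1, labels.size + 1)
  return float(np.sum(precisions * labels) / num_positives)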
Example #5
def __init__(self, num_class, top_k):
  """Construct an EvaluationMetrics object to store the evaluation metrics.

  Args:
    num_class: A positive integer specifying the number of classes.
    top_k: A positive integer specifying how many predictions are considered per video.

  Raises:
    ValueError: An error occurred when MeanAveragePrecisionCalculator cannot
      be constructed.
  """
  self.sum_hit_at_one = 0.0
  self.sum_perr = 0.0
  self.sum_loss = 0.0
  self.map_calculator = map_calculator.MeanAveragePrecisionCalculator(num_class)
  self.global_ap_calculator = ap_calculator.AveragePrecisionCalculator()
  self.top_k = top_k
  self.num_examples = 0
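
The sum_* fields above follow the usual running-sum pattern: per-batch values are accumulated and divided by num_examples when the metrics are reported. The accumulate/get methods are not part of this snippet, so the following is only a reduced, hypothetical sketch of that bookkeeping (the per-batch hit@1, PERR and loss values are assumed to be supplied by the caller):

class RunningEvalSums:
  """Hypothetical reduced version of the pattern above: accumulate
  batch-weighted sums, then divide by the number of examples seen."""

  def __init__(self):
    self.sum_hit_at_one = 0.0
    self.sum_perr = 0.0
    self.sum_loss = 0.0
    self.num_examples = 0

  def accumulate(self, hit_at_one, perr, loss, batch_size):
    # Called once per evaluated batch; a full EvaluationMetrics class would
    # presumably also feed its map_calculator and global_ap_calculator here.
    self.sum_hit_at_one += hit_at_one * batch_size
    self.sum_perr += perr * batch_size
    self.sum_loss += loss * batch_size
    self.num_examples += batch_size

  def get(self):
    n = max(self.num_examples, 1)
    return {
        "avg_hit_at_one": self.sum_hit_at_one / n,
        "avg_perr": self.sum_perr / n,
        "avg_loss": self.sum_loss / n,
    }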
Example #6
    def __init__(self, num_class, filter_empty_classes=True):
        """Construct a calculator to calculate the (macro) average precision.

        Args:
          num_class: A positive integer specifying the number of classes.
          filter_empty_classes: Whether to filter out classes without any positive examples.

        Raises:
          ValueError: An error occurred when num_class is not a positive integer.
        """
        if not isinstance(num_class, int) or num_class <= 1:
            raise ValueError("num_class must be a positive integer.")

        self._ap_calculators = []  # one AveragePrecisionCalculator per class
        self._num_class = num_class  # total number of classes
        self._filter_empty_classes = filter_empty_classes
        for i in range(num_class):
            self._ap_calculators.append(
                average_precision_calculator.AveragePrecisionCalculator())
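
A hypothetical end-to-end sketch of the macro average precision this constructor prepares for: one AveragePrecisionCalculator per class, averaged over the classes, with empty classes optionally skipped. It only uses accumulate and peek_ap_at_n, which appear in the other examples on this page; the helper name and the column-wise slicing are assumptions.

import numpy as np
import average_precision_calculator

def macro_average_precision(predictions, actuals, filter_empty_classes=True):
  """Hypothetical sketch: per-class average precision, averaged over classes.

  predictions, actuals: 'batch' x 'num_classes' numpy arrays.
  """
  num_class = predictions.shape[1]
  aps = []
  for c in range(num_class):
    positives = float(np.sum(actuals[:, c]))
    if filter_empty_classes and positives == 0:
      continue  # skip classes without any positive ground truth
    calc = average_precision_calculator.AveragePrecisionCalculator()
    calc.accumulate(predictions[:, c], actuals[:, c], positives)
    aps.append(calc.peek_ap_at_n())
  return float(np.mean(aps)) if aps else 0.0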
Example #7
    def __init__(self, num_class):
        """Construct a calculator to calculate the (macro) average precision.

        Args:
          num_class: A positive integer specifying the number of classes.

        Raises:
          ValueError: An error occurred when num_class is not a positive integer.
        """
        if not isinstance(num_class, int) or num_class <= 1:
            raise ValueError("num_class must be a positive integer.")

        self._ap_calculators = []  # one AveragePrecisionCalculator per class
        self._num_class = num_class  # total number of classes
        for i in range(num_class):
            self._ap_calculators.append(
                average_precision_calculator.AveragePrecisionCalculator())
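
A small usage note for the validation in this constructor: a num_class that is not an integer greater than one is rejected with ValueError. Minimal sketch, assuming the class is MeanAveragePrecisionCalculator in a mean_average_precision_calculator module (the other examples on this page import it under the alias map_calculator):

# Assumption: module and class names as described above.
import mean_average_precision_calculator as map_calculator

try:
  map_calculator.MeanAveragePrecisionCalculator(num_class=0)
except ValueError as err:
  print("rejected:", err)

calc = map_calculator.MeanAveragePrecisionCalculator(num_class=1000)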
Example #8
def calculate_gap(predictions, actuals, top_k=20):
 
  gap_calculator = ap_calculator.AveragePrecisionCalculator()
  sparse_predictions, sparse_labels, num_positives = top_k_by_class(predictions, actuals, top_k)
  gap_calculator.accumulate(flatten(sparse_predictions), flatten(sparse_labels), sum(num_positives))
  return gap_calculator.peek_ap_at_n()
Example #9
def __init__(self, num_class, top_k):
  self.top_k = top_k
  self.global_ap_calculator = ap_calculator.AveragePrecisionCalculator()