# Example #1
# 0
def calculate_score(loss_file, reverse, smoothing):
    """Print mean normal/abnormal anomaly scores and their delta.

    Args:
        loss_file: Path to a single PSNR/loss record file, or a directory of
            them; for a directory the file with the optimal AUC (as chosen by
            compute_auc) is used.
        reverse: If True, invert the normalized score (score = 1 - score).
        smoothing: If True, smooth each video's score curve with
            score_smoothing.
    """
    if not os.path.isdir(loss_file):
        loss_file_path = loss_file
    else:
        # A directory was given: pick the loss file with the best AUC.
        optical_result = compute_auc(loss_file, reverse, smoothing)
        loss_file_path = optical_result.loss_file
        print('##### optimal result and model = {}'.format(optical_result))
    dataset, psnr_records, gt = load_psnr_gt(loss_file=loss_file_path)

    # the number of videos
    num_videos = len(psnr_records)

    scores = np.array([], dtype=np.float32)
    labels = np.array([], dtype=np.int8)
    # per-video min-max normalization
    for i in range(num_videos):
        distance = psnr_records[i]

        # + 1e-8 guards against division by zero when a video's scores are
        # constant; this also matches the normalization used by
        # get_scores_labels / compute_auc elsewhere in this module.
        distance = (distance - distance.min()) / (
            distance.max() - distance.min() + 1e-8)
        if reverse:
            distance = 1 - distance
        if smoothing:
            distance = score_smoothing(distance)
        # Drop the first/last DECIDABLE_IDX frames of every video before
        # pooling scores and ground-truth labels across videos.
        scores = np.concatenate(
            (scores[:], distance[DECIDABLE_IDX:-DECIDABLE_IDX]), axis=0)
        labels = np.concatenate(
            (labels[:], gt[i][DECIDABLE_IDX:-DECIDABLE_IDX]), axis=0)

    mean_normal_scores = np.mean(scores[labels == 0])
    mean_abnormal_scores = np.mean(scores[labels == 1])
    print('mean normal scores = {}, mean abnormal scores = {}, '
          'delta = {}'.format(mean_normal_scores, mean_abnormal_scores,
                              mean_normal_scores - mean_abnormal_scores))
# Example #2
# 0
def get_scores_labels(loss_file, reverse, smoothing):
    """Load one PSNR/loss record and return pooled per-frame scores/labels.

    Each video's distances are min-max normalized (when NORMALIZE is set),
    optionally reversed and smoothed, then trimmed by DECIDABLE_IDX frames at
    both ends before being concatenated across videos.

    Returns:
        (dataset, scores, labels) where scores/labels are 1-D numpy arrays.
    """
    # the name of dataset, loss, and ground truth
    dataset, psnr_records, gt = load_psnr_gt(loss_file=loss_file)

    score_parts = []
    label_parts = []
    for video_idx, distance in enumerate(psnr_records):
        if NORMALIZE:
            # Min-max normalize within the video; epsilon avoids /0.
            lo = distance.min()
            span = distance.max() - lo + 1e-8
            distance = (distance - lo) / span
            if reverse:
                distance = 1 - distance
        if smoothing:
            distance = score_smoothing(distance)
        # Keep only the decidable middle portion of the video.
        score_parts.append(distance[DECIDABLE_IDX:-DECIDABLE_IDX])
        label_parts.append(gt[video_idx][DECIDABLE_IDX:-DECIDABLE_IDX])

    # Seed with the same empty typed arrays the pooling starts from, so the
    # result dtype (and the zero-video case) matches exactly.
    scores = np.concatenate(
        [np.array([], dtype=np.float32)] + score_parts, axis=0)
    labels = np.concatenate(
        [np.array([], dtype=np.int8)] + label_parts, axis=0)
    return dataset, scores, labels
# Example #3
# 0
def compute_auc_average(loss_file, reverse, smoothing):
    """Compute the per-video-averaged frame-level AUC for each loss file.

    For every loss record (a single file, or every file in a directory), the
    AUC is computed per video and averaged over videos, then printed.

    Args:
        loss_file: Path to one loss record file or a directory of them.
        reverse: If True, use 1 - normalized_score as the anomaly score.
        smoothing: If True, smooth each video's score with score_smoothing.

    Returns:
        The average AUC of the last processed loss file, or None when the
        directory was empty. (Previously the value was only printed.)
    """
    if not os.path.isdir(loss_file):
        loss_file_list = [loss_file]
    else:
        loss_file_list = os.listdir(loss_file)
        loss_file_list = [
            os.path.join(loss_file, sub_loss_file)
            for sub_loss_file in loss_file_list
        ]

    last_auc = None
    for sub_loss_file in loss_file_list:
        # the name of dataset, loss, and ground truth
        dataset, psnr_records, gt = load_psnr_gt(loss_file=sub_loss_file)
        # Dataset-specific ground-truth patches (known label glitches).
        # NOTE(review): indices appear hand-tuned for these datasets — verify
        # against the ground-truth source before changing.
        if dataset == 'shanghaitech':
            gt[51][5] = 0
        elif dataset == 'ped2':
            for i in range(7, 11):
                gt[i][0] = 0
        elif dataset == 'ped1':
            gt[13][0] = 0
        # the number of videos
        num_videos = len(psnr_records)

        auc = 0
        for i in range(num_videos):
            distance = psnr_records[i]

            if NORMALIZE:
                # Per-video min-max normalization; epsilon avoids /0.
                distance = (distance - distance.min()) / (
                    distance.max() - distance.min() + 1e-8)
                # for the score is anomaly score
                if reverse:
                    distance = 1 - distance
            # to smooth the score
            if smoothing:
                distance = score_smoothing(distance)
            # Per-video ROC; pos_label=0 treats normal frames as positives.
            fpr, tpr, thresholds = metrics.roc_curve(np.array(gt[i],
                                                              dtype=np.int8),
                                                     np.array(
                                                         distance,
                                                         dtype=np.float32),
                                                     pos_label=0)
            _auc = metrics.auc(fpr, tpr)
            print('video {}: auc is {}'.format(i + 1, _auc))
            auc += _auc
        auc /= num_videos
        print(auc)
        last_auc = auc
    return last_auc
# Example #4
# 0
def compute_auc(loss_file, reverse, smoothing):
    """Evaluate frame-level AUC for one loss file (or each file in a
    directory) and return the best RecordResult.

    Scores are min-max normalized per video (when NORMALIZE is set),
    optionally reversed and smoothed, trimmed by DECIDABLE_IDX frames at both
    ends, pooled across videos, and scored with a single ROC curve.
    """
    scanning_dir = os.path.isdir(loss_file)
    if scanning_dir:
        loss_file_list = [
            os.path.join(loss_file, entry)
            for entry in os.listdir(loss_file)
        ]
    else:
        loss_file_list = [loss_file]

    optimal_results = RecordResult()
    for sub_loss_file in loss_file_list:
        # Load dataset name, per-video PSNR records, and ground truth.
        dataset, psnr_records, gt = load_psnr_gt(loss_file=sub_loss_file)

        scores = np.array([], dtype=np.float32)
        labels = np.array([], dtype=np.int8)
        for video_idx, distance in enumerate(psnr_records):
            if NORMALIZE:
                # Per-video min-max normalization; epsilon avoids /0.
                lo = distance.min()
                distance = (distance - lo) / (distance.max() - lo + 1e-8)
                # The score is an anomaly score, so optionally invert it.
                if reverse:
                    distance = 1 - distance
            if smoothing:
                distance = score_smoothing(distance)
            # Pool the decidable middle portion of every video.
            trimmed = distance[DECIDABLE_IDX:-DECIDABLE_IDX]
            scores = np.concatenate((scores, trimmed), axis=0)
            labels = np.concatenate(
                (labels, gt[video_idx][DECIDABLE_IDX:-DECIDABLE_IDX]), axis=0)

        # Single ROC over all pooled frames; pos_label=0 -> normal frames
        # are the positive class.
        fpr, tpr, thresholds = metrics.roc_curve(labels, scores, pos_label=0)
        auc = metrics.auc(fpr, tpr)

        results = RecordResult(fpr, tpr, auc, dataset, sub_loss_file)
        if optimal_results < results:
            optimal_results = results

        # Only echo each intermediate result when scanning a directory.
        if scanning_dir:
            print(results)
    print('##### optimal result and model AUC= {}'.format(optimal_results))
    return optimal_results