Example #1
0
        # NOTE(review): interior of a per-batch evaluation loop — `args`, `masks`,
        # `mask_size`, `all_masks`, `shape_mean`, `shape_std`, `learned_dict`,
        # `sparse_alpha`, `sparsity_counts`, `kurtosis_counts`, `IoUevaluate` and
        # the prepare_* helpers are defined outside this excerpt.
        # Build the distance-transform-map (DTM) variant selected on the CLI.
        if args.dtm_type == 'standard':
            dtms = prepare_distance_transform_from_mask(masks, mask_size)
        elif args.dtm_type == 'reciprocal':
            dtms = prepare_reciprocal_DTM_from_mask(masks, mask_size)
        elif args.dtm_type == 'complement':
            dtms = prepare_complement_DTM_from_mask(masks, mask_size)
        elif args.dtm_type == 'other':
            dtms = prepare_other_DTM_from_mask(masks, mask_size, args.bg_constant, args.norm_constant)
        else:
            raise NotImplementedError
        all_masks.append(dtms)

        # Encode the DTMs into sparse codes with ISTA, then decode by
        # multiplying the codes with the learned dictionary.
        if args.if_whiten:
            # Whitened path: standardize with dataset mean/std before encoding,
            # and undo the standardization after decoding.
            centered_dtms = (dtms - shape_mean) / shape_std
            dtms_codes = fast_ista(centered_dtms, learned_dict, lmbda=sparse_alpha, max_iter=80)
            dtms_rc = torch.matmul(dtms_codes, learned_dict) * shape_std + shape_mean
        else:
            # Mean-centering only; add the mean back after decoding.
            centered_dtms = dtms - shape_mean
            dtms_codes = fast_ista(centered_dtms, learned_dict, lmbda=sparse_alpha, max_iter=80)
            dtms_rc = torch.matmul(dtms_codes, learned_dict) + shape_mean

        dtms_rc = dtms_rc.numpy()
        # Sparsity: number of coefficients whose magnitude exceeds 1e-4.
        sparsity_counts.append(np.sum(np.abs(dtms_codes.numpy()) > 1e-4))
        # Raw codes are kept for later (presumably kurtosis) statistics — TODO confirm downstream use.
        kurtosis_counts.append(dtms_codes.numpy())

        # Binarize the reconstruction (offset shifts the 0.5 threshold per dataset)
        # and accumulate IoU against the ground-truth masks.
        dtms_rc = np.where(dtms_rc + args.offset >= 0.5, 1, 0)  # 0.6 for cityscapes masks, 0.9 for coco masks
        IoUevaluate.add_batch(dtms_rc, masks.numpy())
Example #2
0
    # Evaluation: sparse-encode DTMs of each mask batch, decode, binarize,
    # and accumulate IoU of the reconstruction against the original masks.
    # NOTE(review): fragment — `mask_loader`, `size_data`, `mask_size`,
    # `learned_dict`, `sparse_alpha`, `sparsity_counts`, `codes` and the
    # helpers (IOUMetric, fast_ista, prepare_*) come from unseen context.
    IoUevaluate = IOUMetric(2)  # 2 classes: foreground / background
    print("Start evaluation ...")
    for i, masks in enumerate(mask_loader):
        print("Eva [{} / {}]".format(i, size_data))
        # Flatten each mask image into a vector before sparse coding.
        masks = masks.view(masks.shape[0], -1)  # a batch of masks: (N, 784)
        masks = masks.to(torch.float32)
        dtms = prepare_distance_transform_from_mask(
            masks, mask_size)  # for learn DTMs minusone
        # dtms = prepare_overlay_DTMs_from_mask(masks, mask_size)  # for learn DTMs overlay
        # dtms = prepare_extended_DTMs_from_mask(masks, mask_size)  # for learn DTMs extended, 0, 1 range

        # Encode to sparse codes with ISTA, decode via the learned dictionary.
        dtms_codes = fast_ista(dtms,
                               learned_dict,
                               lmbda=sparse_alpha,
                               max_iter=100)
        dtms_rc = torch.matmul(dtms_codes, learned_dict).numpy()

        # Sparsity: number of coefficients whose magnitude exceeds 1e-4.
        sparsity_counts.append(np.sum(np.abs(dtms_codes.numpy()) > 1e-4))

        # Binarize the reconstructed DTMs; the +1 - 0.1 shift makes the test
        # equivalent to dtms_rc > -0.4 (tuned for the "minusone" DTM range).
        dtms_rc = np.where(dtms_rc + 1 - 0.1 > 0.5, 1,
                           0)  # adjust the thresholding to binary masks
        # dtms_rc = np.where(dtms_rc - 0.1 > 0.5, 1, 0)
        # dtms_rc = np.where(dtms_rc > 0.4, 1, 0)
        IoUevaluate.add_batch(dtms_rc, masks.numpy())
        codes.append(dtms_codes.numpy())  # keep per-batch codes for later analysis

        # do some plots
Example #3
0
    # NOTE(review): fragment — `mask_loader`, `size_data`, `learned_dict`,
    # `sparse_alpha`, `sparsity_counts`, `kurtosis_counts` and the helpers
    # (IOUMetric, fast_ista) are defined outside this excerpt.
    all_masks = []  # collected flattened input mask batches
    reconstruction_error = []  # per-batch reconstruction error (accumulation commented out below)

    # Evaluation: sparse-encode the raw binary masks directly (no DTM),
    # decode, binarize at 0.5, and accumulate reconstruction IoU.
    IoUevaluate = IOUMetric(2)  # 2 classes: foreground / background
    print("Start evaluation ...")
    for i, masks in enumerate(mask_loader):
        print("Eva [{} / {}]".format(i, size_data))
        # Flatten each mask image into a vector before sparse coding.
        masks = masks.view(masks.shape[0], -1)  # a batch of masks: (N, 784)
        masks = masks.to(torch.float32)
        all_masks.append(masks)

        # Encode the raw masks to sparse codes with ISTA, then decode by
        # multiplying with the learned dictionary.
        mask_codes = fast_ista(masks,
                               learned_dict,
                               lmbda=sparse_alpha,
                               max_iter=80)
        mask_rc = torch.matmul(mask_codes, learned_dict).numpy()

        # rec_err = np.sum((mask_rc - masks) ** 2, axis=-1).reshape(1, -1)
        # reconstruction_error.append(rec_err)

        # Fraction of active coefficients (|code| > 1e-2) per sample.
        sparsity_counts.append(
            np.mean(np.abs(mask_codes.numpy()) > 1e-2, axis=1))
        # Raw codes kept for later (presumably kurtosis) statistics — TODO confirm downstream use.
        kurtosis_counts.append(mask_codes.numpy())

        # Binarize the reconstruction at 0.5 and accumulate IoU.
        mask_rc = np.where(mask_rc >= 0.5, 1, 0)
        IoUevaluate.add_batch(mask_rc, masks.numpy())
        # break
Example #4
0
                             num_workers=4)  # NOTE(review): tail of a DataLoader(...) call that begins above this excerpt
    size_data = len(mask_loader)  # number of batches; used only for progress logging

    # Evaluation: represent each mask as a polygon, sparse-encode the polygon
    # vertices with ISTA, decode, rasterize back to a mask, and measure IoU.
    # NOTE(review): `mask_size`, `n_vertices`, `learned_dict`, `sparse_alpha`
    # and the helpers (IOUMetric, prepare_polygon_from_mask, fast_ista,
    # poly_to_mask) come from unseen context.
    IoUevaluate = IOUMetric(2)  # 2 classes: foreground / background
    print("Start evaluation ...")
    for i, masks in enumerate(mask_loader):
        print("Eva [{} / {}]".format(i, size_data))
        # Flatten each mask image into a vector.
        masks = masks.view(masks.shape[0], -1)  # a batch of masks: (N, 1600)
        # Extract a fixed-length polygon per mask (n_vertices points, pads=5);
        # presumably returns per-mask vertex coordinates — TODO confirm helper contract.
        polys = prepare_polygon_from_mask(masks, mask_size, n_vertices, pads=5)
        # masks = masks.to(torch.float32)

        # Encode polygon coordinates to sparse codes, decode via the dictionary.
        polygon_codes = fast_ista(polys,
                                  learned_dict,
                                  lmbda=sparse_alpha,
                                  max_iter=80)
        polygon_rc = torch.matmul(polygon_codes, learned_dict)
        # Rasterize reconstructed polygons back to masks and accumulate IoU.
        mask_rc = poly_to_mask(polygon_rc, mask_size)
        # mask_rc = poly_to_mask(polys, mask_size)
        # for j in range(mask_rc.shape[0]):
        #     show_img = np.concatenate([masks[j].numpy().reshape((mask_size, mask_size)),
        #                                mask_rc[j].numpy().reshape((mask_size, mask_size))],
        #                               axis=1).astype(np.uint8) * 255
        #     cv2.imshow('cat', show_img)
        #     if cv2.waitKey() & 0xFF == ord('q'):
        #         break
        IoUevaluate.add_batch(mask_rc.numpy(), masks.numpy())

    # Final metric: mean IoU over everything accumulated via add_batch.
    _, _, _, mean_iu, _ = IoUevaluate.evaluate()