# Example #1
def test_accuracy_u2Squared(capsys):
    """
    Test the U^2-Net weighted F-score on the DUTS-TE dataset.

    Loads the persisted ``duts_te_validation`` FiftyOne dataset, attaches the
    predicted binary masks as a ``u2squared`` segmentation field on every
    sample, evaluates them against ``ground_truth``, and asserts the F-score
    beats the value reported in the paper.
    """
    dataset = fo.load_dataset("duts_te_validation")

    # Directory holding the predicted masks, one file per sample, named
    # identically to the ground-truth image files.
    DatasetPathPrediction = "/home/Develop/Dataset/SemanticSegmentation/DUTS-TE/DUTS-TE-Image-Pred-Mask/"
    with capsys.disabled():
        with fo.ProgressBar():
            for sample in dataset:

                # The prediction shares the ground-truth image's filename.
                _, tail = os.path.split(sample.filepath)
                maskPredPath = os.path.join(DatasetPathPrediction, tail)

                print(maskPredPath)

                maskPred = cv2.imread(maskPredPath, cv2.IMREAD_UNCHANGED)
                # cv2.imread returns None for missing/unreadable files;
                # skip such samples instead of crashing in cv2.threshold.
                if maskPred is None:
                    continue

                # Binarize the prediction: values above 127 -> 255.
                _, maskPred = cv2.threshold(maskPred, 127, 255,
                                            cv2.THRESH_BINARY)

                sample["u2squared"] = fo.Segmentation(mask=maskPred)
                sample.save()

        results = dataset.evaluate_segmentations(
            "u2squared",
            gt_field="ground_truth",
            eval_key="eval_segmentation",
        )

        # Compute the metrics dict once and reuse it for print + assert.
        metrics = results.metrics()
        print(metrics)
        # 0.80 is the weighted F-measure reported in the paper for DUTS-TE.
        assert metrics["fscore"] > 0.80
# Example #2
    def test_from_fiftyone(tmpdir):
        """Build a SemanticSegmentationData module from a FiftyOne dataset
        and check the batch shapes produced by every dataloader."""
        tmp_dir = Path(tmpdir)

        # Create random dummy images on disk.
        images = [
            str(tmp_dir / "img1.png"),
            str(tmp_dir / "img2.png"),
            str(tmp_dir / "img3.png"),
        ]

        num_classes: int = 2
        img_size: Tuple[int, int] = (128, 128)

        for img_file in images:
            _rand_image(img_size).save(img_file)

        # One random label map per image.
        targets = [np.array(_rand_labels(img_size, num_classes)) for _ in range(len(images))]

        dataset = fo.Dataset.from_dir(
            str(tmp_dir),
            dataset_type=fo.types.ImageDirectory,
        )

        # Attach a ground-truth segmentation mask to every sample.
        for idx, sample in enumerate(dataset):
            sample["ground_truth"] = fo.Segmentation(mask=targets[idx][:, :, 0])
            sample.save()

        # Instantiate the data module, reusing the same dataset for all splits.
        dm = SemanticSegmentationData.from_fiftyone(
            train_dataset=dataset,
            val_dataset=dataset,
            test_dataset=dataset,
            predict_dataset=dataset,
            batch_size=2,
            num_workers=0,
            num_classes=num_classes,
        )
        assert dm is not None
        assert dm.train_dataloader() is not None
        assert dm.val_dataloader() is not None
        assert dm.test_dataloader() is not None

        # Train/val/test batches all carry images and labels of identical
        # shapes, so the three copy-pasted checks collapse into one loop.
        for loader in (dm.train_dataloader(), dm.val_dataloader(), dm.test_dataloader()):
            data = next(iter(loader))
            imgs, labels = data[DefaultDataKeys.INPUT], data[DefaultDataKeys.TARGET]
            assert imgs.shape == (2, 3, 128, 128)
            assert labels.shape == (2, 128, 128)

        # Predict batches carry only images (no targets).
        data = next(iter(dm.predict_dataloader()))
        imgs = data[DefaultDataKeys.INPUT]
        assert imgs.shape == (2, 3, 128, 128)
# Example #3
# Build the DUTS-TE ground-truth dataset from the mask directory and
# register it persistently in FiftyOne under the name "duts_te_validation".
maskList = os.listdir(DatasetPath)

samples = []
dataset = fo.Dataset("duts_te_validation")
dataset.persistent = True  # keep the dataset across FiftyOne sessions

for maskName in maskList:

    # os.path.join is robust whether or not DatasetPath ends with a slash.
    maskPath = os.path.join(DatasetPath, maskName)

    # Load the mask with OpenCV; os.listdir may return non-image entries,
    # for which cv2.imread returns None -- skip them instead of crashing
    # inside cv2.threshold.
    mask = cv2.imread(maskPath, cv2.IMREAD_UNCHANGED)
    if mask is None:
        continue

    # Binarize the ground truth: values above 127 become foreground (255).
    _, mask = cv2.threshold(mask, 127, 255, cv2.THRESH_BINARY)

    samples.append(fo.Sample(filepath=maskPath,
                             ground_truth=fo.Segmentation(mask=mask)))

dataset.add_samples(samples)

# ADD PREDICTION
# To add predictions you need to:
# 1) cycle over every dataset sample (ground truth), and
# 2) for each sample add a custom field "u2squared".

dataset = fo.load_dataset("duts_te_validation")

DatasetPathPrediction = "/home/Develop/Dataset/SemanticSegmentation/DUTS-TE/DUTS-TE-Image-Pred-Mask/"