Example #1
    def test(self):
        # Define the files we'll be making
        testProjectName = 'test_project.ilp'
        # Clean up: Remove the test data files we created last time (just in case)
        for f in [testProjectName]:
            try:
                os.remove(f)
            except OSError:
                pass

        # Create an empty project
        with h5py.File(testProjectName, 'w') as testProject:
            testProject.create_dataset("ilastikVersion", data=0.6)

            # Create an operator to work with and give it some input
            graph = Graph()
            operatorToSave = OpFeatureSelection(graph=graph)

            # Configure scales
            scales = [0.1, 0.2, 0.3, 0.4, 0.5]
            operatorToSave.Scales.setValue(scales)

            # Configure feature types
            featureIds = ['GaussianSmoothing', 'LaplacianOfGaussian']
            operatorToSave.FeatureIds.setValue(featureIds)

            # All False (no features selected)
            selectionMatrix = numpy.zeros((2, 5), dtype=bool)

            # Change a few to True
            selectionMatrix[0, 0] = True
            selectionMatrix[1, 0] = True
            selectionMatrix[0, 2] = True
            selectionMatrix[1, 4] = True
            operatorToSave.SelectionMatrix.setValue(selectionMatrix)

            # Serialize!
            serializer = FeatureSelectionSerializer(operatorToSave,
                                                    'FeatureSelections')
            serializer.serializeToHdf5(testProject, testProjectName)

            assert (
                testProject['FeatureSelections/Scales'].value == scales).all()
            assert (testProject['FeatureSelections/FeatureIds'].value ==
                    featureIds).all()
            assert (testProject['FeatureSelections/SelectionMatrix'].value ==
                    selectionMatrix).all()

            # Deserialize into a fresh operator
            operatorToLoad = OpFeatureSelection(graph=graph)
            deserializer = FeatureSelectionSerializer(operatorToLoad,
                                                      'FeatureSelections')
            deserializer.deserializeFromHdf5(testProject, testProjectName)

            assert (operatorToLoad.Scales.value == scales).all()
            assert (operatorToLoad.FeatureIds.value == featureIds).all()
            assert (
                operatorToLoad.SelectionMatrix.value == selectionMatrix).all()

        os.remove(testProjectName)
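For context, the round trip this test exercises is ordinary HDF5 usage: the serializer writes each slot under a group and the deserializer reads the datasets back. A minimal sketch of that pattern using only h5py and numpy (the file and group names below are illustrative, not part of the ilastik API):

    import h5py
    import numpy

    # Write a boolean selection matrix under a group, then read it back.
    matrix = numpy.zeros((2, 5), dtype=bool)
    matrix[0, 0] = True

    with h5py.File('roundtrip_sketch.h5', 'w') as f:
        f.create_group('FeatureSelections')
        f['FeatureSelections'].create_dataset('SelectionMatrix', data=matrix)

    with h5py.File('roundtrip_sketch.h5', 'r') as f:
        loaded = f['FeatureSelections/SelectionMatrix'][()]  # read the whole dataset

    assert (loaded == matrix).all()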
Example #2
    def __init__(self, filter_implementation, *args, **kwargs):
        # Schematic for cached images is as follows.
        #
        # InputImage -> opFeatureSelection -> opIntegralImage_from_cache -> opIntegralImageCache ---
        #           \                                                                               `-- (stacked via execute) -> CachedOutputImage
        #            `-> opHessianEigenvectors -> opConvertToChannels -> opHessianEigenvectorCache -/

        super(OpIIBoostFeatureSelection, self).__init__(*args, **kwargs)
        self.opFeatureSelection = OpFeatureSelection(filter_implementation,
                                                     parent=self)

        self.opFeatureSelection.InputImage.connect(self.InputImage)
        self.opFeatureSelection.Scales.connect(self.Scales)
        self.opFeatureSelection.FeatureIds.connect(self.FeatureIds)
        self.opFeatureSelection.SelectionMatrix.connect(self.SelectionMatrix)
        self.opFeatureSelection.FeatureListFilename.connect(
            self.FeatureListFilename)
        self.FeatureLayers.connect(self.opFeatureSelection.FeatureLayers)

        self.WINDOW_SIZE = self.opFeatureSelection.WINDOW_SIZE

        # The "normal" pixel features are integrated.
        self.opIntegralImage = OpIntegralImage(parent=self)
        self.opIntegralImage.Input.connect(self.opFeatureSelection.OutputImage)

        self.opIntegralImage_from_cache = OpIntegralImage(parent=self)
        self.opIntegralImage_from_cache.Input.connect(
            self.opFeatureSelection.CachedOutputImage)

        # We use an UNBLOCKED cache to store integral features, because a blocked cache would service
        #  requests by concatenating neighboring blocks.  That is not a valid operation for integral images.
        self.opIntegralImageCache = OpUnblockedArrayCache(parent=self)
        self.opIntegralImageCache.Input.connect(
            self.opIntegralImage_from_cache.Output)

        # Note: OutputImage and CachedOutputImage are not directly connected.
        #       Their data is obtained in execute(), below.

        self.opHessianEigenvectors = OpHessianEigenvectors(parent=self)
        self.opHessianEigenvectors.Input.connect(self.InputImage)

        # The operator above produces an image with weird axes,
        #  so let's convert it to a multi-channel image for easy handling.
        self.opConvertToChannels = OpConvertEigenvectorsToChannels(parent=self)
        self.opConvertToChannels.Input.connect(
            self.opHessianEigenvectors.Output)

        # Create a cache for the hessian eigenvector image data
        self.opHessianEigenvectorCache = OpSlicedBlockedArrayCache(parent=self)
        self.opHessianEigenvectorCache.name = "opHessianEigenvectorCache"
        self.opHessianEigenvectorCache.Input.connect(
            self.opConvertToChannels.Output)
        self.opHessianEigenvectorCache.fixAtCurrent.setValue(False)

        self.InputImage.notifyReady(self.checkConstraints)

        self.input_axistags = None
        self.InputImage.notifyMetaChanged(self._handleMetaChanged)
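The comment about the unblocked cache is the key design point in this constructor: an integral image is a running sum, so each value depends on everything before it, and stitching per-block results together does not reproduce the integral image of the whole volume. A tiny 1-d numpy illustration of that constraint (not ilastik code):

    import numpy

    data = numpy.arange(8, dtype=numpy.float32)

    # Running sum over the full array...
    full = numpy.cumsum(data)

    # ...versus per-block running sums concatenated afterwards.
    stitched = numpy.concatenate([numpy.cumsum(data[:4]), numpy.cumsum(data[4:])])

    # The second block is missing the total carried over from the first block,
    # so assembling integral images block by block gives the wrong answer.
    assert not (full == stitched).all()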
Example #3
    def test_2d(self):
        graph = Graph()
        data2d = numpy.random.random((2, 100, 100, 1, 3))
        data2d = vigra.taggedView(data2d, axistags='txyzc')
        # Define operators
        opFeatures = OpFeatureSelection(graph=graph)
        opFeatures.Scales.connect(self.opFeatures.Scales[0])
        opFeatures.FeatureIds.connect(self.opFeatures.FeatureIds[0])
        opFeatures.SelectionMatrix.connect(self.opFeatures.SelectionMatrix[0])

        # Set input data
        opFeatures.InputImage.setValue(data2d)

        # Compute results for the top slice only
        topSlice = [0, slice(None), slice(None), 0, slice(None)]
        result = opFeatures.OutputImage[topSlice].wait()
Example #4
        def __init__(self):
            graph = Graph()
            self._reorder_op = OpReorderAxes(graph=graph, AxisOrder=axis_order)

            self._feature_sel_op = OpFeatureSelection(graph=graph)
            self._feature_sel_op.InputImage.connect(self._reorder_op.Output)
            self._feature_sel_op.FeatureIds.setValue(feature_matrix.names)
            self._feature_sel_op.Scales.setValue(feature_matrix.scales)
            self._feature_sel_op.SelectionMatrix.setValue(feature_matrix.selections)
            self._feature_sel_op.ComputeIn2d.setValue(feature_matrix.compute_in_2d.tolist())

            self._predict_op = OpClassifierPredict(graph=graph)
            self._predict_op.Classifier.setValue(classifer.instance)
            self._predict_op.Classifier.meta.classifier_factory = classifer.factory
            self._predict_op.Image.connect(self._feature_sel_op.OutputImage)
            self._predict_op.LabelsCount.setValue(classifer.label_count)
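This constructor closes over axis_order, feature_matrix, and classifer, which are defined in an enclosing scope that the excerpt does not show. Judging only from the attributes accessed above, they would need to look roughly like the hypothetical stand-ins below (names and types are guesses for illustration, not the real ilastik objects):

    from dataclasses import dataclass

    import numpy

    @dataclass
    class FeatureMatrix:               # hypothetical stand-in
        names: list                    # e.g. ['GaussianSmoothing', 'LaplacianOfGaussian']
        scales: list                   # e.g. [0.3, 0.7, 1.0]
        selections: numpy.ndarray      # bool matrix, shape (len(names), len(scales))
        compute_in_2d: numpy.ndarray   # bool flag per scale; .tolist() is called on it

    @dataclass
    class Classifier:                  # hypothetical stand-in
        instance: object               # the trained classifier
        factory: object                # the factory that created it
        label_count: int               # number of label classes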
Example #5
    def test_ComputeIn2d(self):
        # Tests the ComputeIn2d flag when smoothing a 3d block (smoothing across all three dimensions vs. only two).
        opFeatures = OpFeatureSelection(graph=Graph())
        opFeatures.Scales.setValue([1.0])
        opFeatures.FeatureIds.setValue(["GaussianSmoothing"])
        opFeatures.SelectionMatrix.setValue(numpy.ones((1, 1), dtype=bool))
        opFeatures.ComputeIn2d.setValue([False])
        shape = [5, 5, 5]
        data = numpy.ones(shape, dtype=numpy.float32)
        for z in range(shape[0]):
            # make sure data is anisotropic in z
            data[z, z, 0] = 0

        data = vigra.taggedView(data[None, ...], "czyx")
        opFeatures.InputImage.setValue(data)

        res3d = opFeatures.OutputImage[:].wait()
        opFeatures.ComputeIn2d.setValue([True])
        res2d = opFeatures.OutputImage[:].wait()
        assert (res3d != res2d).all()
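The same 2d-versus-3d distinction can be reproduced without ilastik: smoothing a volume with a Gaussian along all three axes gives a different result than smoothing each z-slice on its own, as soon as the data varies along z. A small sketch of the idea using scipy (used here purely for illustration; OpFeatureSelection relies on its own filter implementations):

    import numpy
    from scipy.ndimage import gaussian_filter

    data = numpy.ones((5, 5, 5), dtype=numpy.float32)
    data[2, 3, 1] = 0.0  # a single dip so the volume is not constant along z

    smoothed_3d = gaussian_filter(data, sigma=1.0)              # smooth across z, y and x
    smoothed_2d = gaussian_filter(data, sigma=(0.0, 1.0, 1.0))  # leave z untouched

    # The dip bleeds into neighbouring z-slices only in the 3d case.
    assert (smoothed_3d != smoothed_2d).any()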
Example #6
    def test(self):
        # Define the files we'll be making
        testProjectName = "test_project.ilp"
        # Clean up: Remove the test data files we created last time (just in case)
        for f in [testProjectName]:
            try:
                os.remove(f)
            except OSError:
                pass

        # Create an empty project
        with h5py.File(testProjectName, "w") as testProject:
            testProject.create_dataset("ilastikVersion", data=b"1.0.0")

            # Create an operator to work with and give it some input
            graph = Graph()
            operatorToSave = OpFeatureSelection(graph=graph)

            scales = operatorToSave.Scales.value
            featureIds = operatorToSave.FeatureIds.value

            # Start from the operator's minimal default selection
            # (copied so we don't mutate the shared default matrix)
            selectionMatrix = operatorToSave.MinimalFeatures.copy()

            # Change a few to True
            selectionMatrix[0, 0] = True
            selectionMatrix[1, 1] = True
            selectionMatrix[2, 2] = True
            selectionMatrix[3, 3] = True
            selectionMatrix[4, 4] = True
            selectionMatrix[5, 5] = True
            operatorToSave.SelectionMatrix.setValue(selectionMatrix)

            # Serialize!
            serializer = FeatureSelectionSerializer(operatorToSave,
                                                    "FeatureSelections")
            serializer.serializeToHdf5(testProject, testProjectName)

        with h5py.File(testProjectName, "r") as testProject:
            file_feature_ids = numpy.asarray(
                [s.decode("utf-8")
                 for s in testProject["FeatureSelections/FeatureIds"].value])

            assert (
                testProject["FeatureSelections/Scales"].value == scales).all()
            assert (file_feature_ids == featureIds).all()
            assert (testProject["FeatureSelections/SelectionMatrix"].value ==
                    selectionMatrix).all()

            # Deserialize into a fresh operator
            operatorToLoad = OpFeatureSelection(graph=graph)

            deserializer = FeatureSelectionSerializer(operatorToLoad,
                                                      "FeatureSelections")
            deserializer.deserializeFromHdf5(testProject, testProjectName)
            assert (operatorToLoad.FeatureIds.value == getFeatureIdOrder()
                    ), "Feature IDs were deserialized to a strange order!"

            assert isinstance(operatorToLoad.Scales.value, list)
            assert isinstance(operatorToLoad.FeatureIds.value, list)

            assert operatorToLoad.Scales.value == scales
            assert operatorToLoad.FeatureIds.value == featureIds
            assert (
                operatorToLoad.SelectionMatrix.value == selectionMatrix).all()

        os.remove(testProjectName)
Example #7
    def test(self):
        # Define the files we'll be making
        testProjectName = 'test_project.ilp'
        # Clean up: Remove the test data files we created last time (just in case)
        for f in [testProjectName]:
            try:
                os.remove(f)
            except OSError:
                pass

        # Create an empty project
        with h5py.File(testProjectName, 'w') as testProject:
            testProject.create_dataset("ilastikVersion", data="1.0.0")

            # Create an operator to work with and give it some input
            graph = Graph()
            operatorToSave = OpFeatureSelection(
                graph=graph, filter_implementation='Original')

            scales = operatorToSave.Scales.value
            featureIds = operatorToSave.FeatureIds.value

            # All False (no features selected)
            selectionMatrix = operatorToSave.SelectionMatrix.value.copy()

            # Change a few to True
            selectionMatrix[0, 0] = True
            selectionMatrix[1, 1] = True
            selectionMatrix[2, 2] = True
            selectionMatrix[3, 3] = True
            selectionMatrix[4, 4] = True
            selectionMatrix[5, 5] = True
            operatorToSave.SelectionMatrix.setValue(selectionMatrix)

            # Serialize!
            serializer = FeatureSelectionSerializer(operatorToSave,
                                                    'FeatureSelections')
            serializer.serializeToHdf5(testProject, testProjectName)

        with h5py.File(testProjectName, 'r') as testProject:
            assert (
                testProject['FeatureSelections/Scales'].value == scales).all()
            assert (testProject['FeatureSelections/FeatureIds'].value ==
                    featureIds).all()
            assert (testProject['FeatureSelections/SelectionMatrix'].value ==
                    selectionMatrix).all()

            # Deserialize into a fresh operator
            operatorToLoad = OpFeatureSelection(
                graph=graph, filter_implementation='Original')

            deserializer = FeatureSelectionSerializer(operatorToLoad,
                                                      'FeatureSelections')
            deserializer.deserializeFromHdf5(testProject, testProjectName)
            assert operatorToLoad.FeatureIds.value == getFeatureIdOrder(), \
                "Feature IDs were deserialized to a strange order!"

            assert isinstance(operatorToLoad.Scales.value, list)
            assert isinstance(operatorToLoad.FeatureIds.value, list)

            assert (operatorToLoad.Scales.value == scales)
            assert (operatorToLoad.FeatureIds.value == featureIds)
            assert (
                operatorToLoad.SelectionMatrix.value == selectionMatrix).all()

        os.remove(testProjectName)