    def test_chain(self):
        class OpA(graph.Operator):
            Input = graph.InputSlot()  # required slot

            def setupOutputs(self):
                pass

            def propagateDirty(self, *a, **kw):
                pass

        class OpB(graph.Operator):
            Input = graph.InputSlot()  # required slot
            Output = graph.OutputSlot()

            setupOutputs = mock.Mock()

            def propagateDirty(self, *a, **kw):
                pass

        g = graph.Graph()

        op_a = OpA(graph=g)
        op_b = OpB(graph=g)

        op_b.Input.connect(op_a.Input)

        with op_a.transaction:
            op_a.Input.setValue("fadf")
            op_b.setupOutputs.assert_not_called()

        op_b.setupOutputs.assert_called_once()
    def aaaNumpyFile(self):
        g = graph.Graph()
        npfile = "/home/akreshuk/data/synapse_small_4d.npy"
        reader = OpInputDataReader(graph=g)
        reader.FilePath.setValue(npfile)
        # out = reader.Output[:].wait()
        # print(out.shape)

        opFeatures = OpPixelFeaturesPresmoothed(graph=g)
        opFeatures.Scales.setValue(self.scales)
        opFeatures.FeatureIds.setValue(self.featureIds)
        opFeatures.Input.connect(reader.Output)
        opFeatures.Matrix.setValue(self.selectedFeatures[5])
        out = opFeatures.Output[:].wait()
        print(out.shape)

        opFeaturesInterp = OpPixelFeaturesInterpPresmoothed(graph=g)
        opFeaturesInterp.Scales.setValue(self.scales)
        opFeaturesInterp.FeatureIds.setValue(self.featureIds)
        opFeaturesInterp.Input.connect(reader.Output)
        opFeaturesInterp.Matrix.setValue(self.selectedFeatures[5])
        opFeaturesInterp.InterpolationScaleZ.setValue(2)
        out = opFeaturesInterp.Output[:].wait()

        print(out.shape)
    def testSimpleCleanup(self):
        g = graph.Graph()
        op = OpSimple(graph=g)
        r = weakref.ref(op)
        del op
        gc.collect()
        assert r() is None, "cleanup failed"
    def testTransactionMultipleSetsOnSameSlot(self):
        g = graph.Graph()
        op = TransactionOp(graph=g)

        with op.transaction:
            op.Input1.setValue("val1")
            op.Input1.setValue("val2")

        op.setupOutputs.assert_called_once()
    def setup_method(self, method):
        self.g = graph.Graph()
        self.op1 = OpWithMultiInputs(graph=self.g)
        self.op2 = OpWithMultiInputs(graph=self.g)

        self.wrappedOp = OperatorWrapper(OpA, graph=self.g)

        self.wrappedOp.Input1.connect(self.op1.Input)
        self.wrappedOp.Input2.connect(self.op2.Input)
    def testNestedTransactionFails(self):
        g = graph.Graph()
        op = TransactionOp(graph=g)

        with op.transaction:
            op.Input1.setValue("val1")

            with pytest.raises(AssertionError):
                with op.transaction:
                    op.Input2.setValue("val2")
    def testConnectedCleanup(self):
        g = graph.Graph()
        op1 = OpSimple(graph=g)
        op2 = OpSimple(graph=g)

        op2.Input.connect(op1.Output)
        op2.Input.disconnect()
        # op2.cleanUp()

        r = weakref.ref(op2)
        del op2
        gc.collect()
        assert r() is None, "cleanup failed"
def test_operator_str():
    g = graph.Graph()

    class OpA(graph.Operator):
        Input = graph.InputSlot(level=2)

    op = OpA(graph=g)
    op.Input.resize(2)

    assert "level=1" in str(op.Input[0])
    assert "index=(0,)" in str(op.Input[0])

    assert "len=2" in str(op.Input)
    assert "index" not in str(op.Input)
    def test_operator_except_formatting(self):
        op = self.BrokenOp(graph=graph.Graph())

        exc = None

        try:
            op.Out.value
        except Exception as e:
            exc = e

        assert exc

        stack = operator.format_operator_stack(exc.__traceback__)
        assert stack
        assert len(stack) == 2
        assert "TestOperatorStackFormatter.BrokenOp.execute" in stack[0]
    def aaaAssert(self):
        g = graph.Graph()
        data = numpy.zeros((self.nx, self.ny, self.nz), dtype=numpy.float32)
        for i in range(self.data3d.shape[2]):
            data[:, :, i] = i
        data = data.view(vigra.VigraArray)
        data.axistags = vigra.VigraArray.defaultAxistags(3)
        opFeaturesInterp = OpPixelFeaturesInterpPresmoothed(graph=g)
        opFeaturesInterp.Input.setValue(data)
        opFeaturesInterp.Scales.setValue(self.scales)
        opFeaturesInterp.FeatureIds.setValue(self.featureIds)
        opFeaturesInterp.InterpolationScaleZ.setValue(self.scaleZ)
        opFeaturesInterp.Matrix.setValue(self.selectedFeatures[0])

        out = opFeaturesInterp.Output[:].wait()

        print("passed")
    def runFeatures(self, data, dataInterp):
        g = graph.Graph()
        opFeatures = OpPixelFeaturesPresmoothed(graph=g)
        opFeaturesInterp = OpPixelFeaturesInterpPresmoothed(graph=g)

        opFeatures.Input.setValue(dataInterp)
        opFeaturesInterp.Input.setValue(data)

        opFeatures.Scales.setValue(self.scales)
        opFeaturesInterp.Scales.setValue(self.scales)

        opFeatures.FeatureIds.setValue(self.featureIds)
        opFeaturesInterp.FeatureIds.setValue(self.featureIds)

        opFeaturesInterp.InterpolationScaleZ.setValue(self.scaleZ)

        # for i, imatrix in enumerate(self.selectedFeatures[0:1]):
        for i, imatrix in enumerate(self.selectedFeatures):
            opFeatures.Matrix.setValue(imatrix)
            opFeaturesInterp.Matrix.setValue(imatrix)
            outputInterpData = opFeatures.Output[:].wait()
            outputInterpFeatures = opFeaturesInterp.Output[:].wait()

            for iz in range(self.nz):
                # for iz in range(2, 3):
                # print(iz, iz * self.scaleZ)
                try:
                    outputInterpDataSlice = opFeatures.Output[:, :, iz * self.scaleZ : iz * self.scaleZ + 1, :].wait()
                    outputInterpFeaturesSlice = opFeaturesInterp.Output[:, :, iz, :].wait()
                    assert_array_almost_equal(outputInterpDataSlice, outputInterpFeaturesSlice, 1)
                    assert_array_almost_equal(outputInterpData[:, :, iz * self.scaleZ, 0], outputInterpFeatures[:, :, iz, 0], 1)
                    # assert_array_almost_equal(outputInterpDataSlice[:, :, 0, :], outputInterpData[:, :, iz*self.scaleZ, :], 3)
                    assert_array_almost_equal(outputInterpFeatures[:, :, iz, :], outputInterpFeaturesSlice[:, :, 0, :], 1)
                except AssertionError:
                    print("failed for feature:", imatrix, i)
                    print("failed for slice:", iz)
                    print("interp data:", outputInterpData[:, :, iz * self.scaleZ, 0])
                    print("interp features:", outputInterpFeatures[:, :, iz, 0])
                    print("interp data slice:", outputInterpDataSlice[:, :, 0, 0])
                    print("interp features slice:", outputInterpFeaturesSlice[:, :, 0, 0])
                    raise
    def aaaSlices(self):
        g = graph.Graph()
        opFeatures = OpPixelFeaturesPresmoothed(graph=g)
        opFeatures.Scales.setValue(self.scales)
        opFeatures.FeatureIds.setValue(self.featureIds)
        opFeatures.Input.setValue(self.dataNoChannels)
        for i, imatrix in enumerate(self.selectedFeatures):
            opFeatures.Matrix.setValue(imatrix)
            # compute in one piece
            dataOne = opFeatures.Output[:].wait()

            # compute slice-wise
            for z in range(self.nz):
                dataSlice = opFeatures.Output[:, :, z:z + 1].wait()
                try:
                    assert_array_almost_equal(dataOne[:, :, z:z + 1], dataSlice, 2)
                except AssertionError:
                    print("wrong for matrix:", imatrix)
                    print("wrong for slice:", z)
                    print(dataOne[:, :, z:z + 1])
                    print(dataSlice)
                    raise
    def testTransactionSetMultipleSlots(self):
        input1, input2 = None, None

        def fetch_values(self, *args, **kwargs):
            nonlocal input1, input2
            input1 = self.Input1.value
            input2 = self.Input2.value

        g = graph.Graph()
        op = TransactionOp(graph=g)

        setup_mock = mock.Mock()
        setup_mock.side_effect = fetch_values

        op.setupOutputs = types.MethodType(setup_mock, op)

        with op.transaction:
            op.Input1.setValue("val1")
            op.Input2.setValue("val2")

        op.setupOutputs.assert_called_once()
        assert input1 == "val1"
        assert input2 == "val2"
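# Note: the transaction tests above use a TransactionOp helper that is defined elsewhere
# in the original test module. A minimal, hypothetical sketch that is compatible with how
# it is used here (two inputs, a mocked setupOutputs so calls can be counted) could look
# like this; the exact definition in the source may differ.
from unittest import mock

from lazyflow import graph


class TransactionOp(graph.Operator):
    Input1 = graph.InputSlot(optional=True)
    Input2 = graph.InputSlot(optional=True)
    Output = graph.OutputSlot()

    # class-level Mock so tests can assert how often setupOutputs was triggered
    setupOutputs = mock.Mock()

    def propagateDirty(self, *a, **kw):
        pass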
# Example #14
    def __init__(
            self,
            current_opFeatureSelection,
            current_pixelClassificationApplet,
            labels_list_data
        ):
        '''
        :param current_opFeatureSelection: opFeatureSelection from ilastik
        :param current_pixelClassificationApplet: pixelClassificationApplet from ilastik
        :param labels_list_data: labels list data from the ilastik labeling GUI
        '''
        super(FeatureSelectionDialog, self).__init__()

        self.pixelClassificationApplet = current_pixelClassificationApplet
        self.opPixelClassification = current_pixelClassificationApplet.topLevelOperatorView
        self.opFeatureSelection = current_opFeatureSelection

        self._init_feature_matrix = False

        # lazyflow requires operators to be connected to a graph, although no interconnection takes place here
        g = graph.Graph()

        # instantiate feature selection operators
        # these operators are not connected to the ilastik lazyflow architecture.
        # Reason provided in self._run_selection()
        self.opFilterFeatureSelection = opPixelClassification.OpFilterFeatureSelection(graph=g)
        self.opWrapperFeatureSelection = opPixelClassification.OpWrapperFeatureSelection(graph=g)
        self.opGiniFeatureSelection = opPixelClassification.OpGiniFeatureSelection(graph=g)

        # retrieve the featureMatrixCaches operator from the opPixelClassification. This operator provides the features
        # and labels matrix required by the feature selection operators
        self.opFeatureMatrixCaches = self.opPixelClassification.opFeatureMatrixCaches

        '''FIXME: the FeatureSelectionDialog will only display one slice of the dataset, to save RAM.
        By using only one slice, we can simply predict the segmentation of that slice for each feature set and
        store it in RAM. If we allowed showing the whole dataset, we would have to copy the opFeatureSelection and
        opPixelClassification once for each feature set, which would cost too much feature computation time as
        well as too much RAM.
        However, this shortcoming could be overcome by creating something like an opFeatureSubset: we would enable
        all features in the opFeatureSelection and create the feature sets by 'filtering' the output of the
        opFeatureSelection. Thereby, provided that features in the opFeatureSelection are cached (are they?), the
        features would not have to be recalculated for each feature set.'''
        self._xysliceID = -1

        self._initialized_all_features_segmentation_layer = False
        self._initialized_current_features_segmentation_layer = False
        self._initialized_feature_matrix = False

        self._selected_feature_set_id = None
        self.selected_features_matrix = self.opFeatureSelection.SelectionMatrix.value
        self.feature_channel_names = None  # this gets initialized when the matrix is set to all features in _run_selection

        self._stack_dim = self.opPixelClassification.InputImages.meta.shape
        self._stack_axistags = self.opPixelClassification.InputImages.meta.axistags

        self.__selection_methods = {
            0: "gini",
            1: "filter",
            2: "wrapper"
        }

        self._selection_params = {
            "num_of_feat": 7,  #arbitrary number for the default, Ulli thinks it's good
            "c": 0.1
        }
        self._selection_method = "None"
        self._gui_initialized = False  # set to True once the GUI is initialized; prevents multiple initialization
        self._feature_selection_results = []

        self.labels_list_data = labels_list_data
        self.layerstack = layerwidget.LayerStackModel()

        # this initializes the actual GUI
        self._init_gui()

        # set default parameter values
        self.number_of_feat_box.setValue(self._selection_params["num_of_feat"])
        self.spinbox_c_widget.setValue(self._selection_params["c"])

        # connect functionality
        self.cancel_button.clicked.connect(self.reject)
        self.select_set_button.clicked.connect(self.accept)
        self.select_method_cbox.currentIndexChanged.connect(self._handle_selected_method_changed)
        self.spinbox_c_widget.valueChanged.connect(self._update_parameters)
        self.number_of_feat_box.valueChanged.connect(self._update_parameters)
        self.run_button.clicked.connect(self._run_selection)
        self.all_feature_sets_combo_box.currentIndexChanged.connect(self._handle_selected_feature_set_changed)

        # make sure internal variables are in sync with the GUI
        self._handle_selected_method_changed()
        self._update_parameters()

        self.resize(1366, 768)
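# The FIXME in __init__ above proposes an 'opFeatureSubset' that builds each feature set by
# filtering channels out of one fully computed (ideally cached) feature image instead of
# recomputing features per set. The operator below is only a hypothetical sketch of that
# idea (it is not part of ilastik); it assumes the channel axis is the last axis of the
# feature image.
from lazyflow import graph


class OpFeatureSubset(graph.Operator):
    Input = graph.InputSlot()              # full multi-channel feature image
    SelectedChannels = graph.InputSlot()   # list of channel indices to keep
    Output = graph.OutputSlot()

    def setupOutputs(self):
        channels = self.SelectedChannels.value
        self.Output.meta.assignFrom(self.Input.meta)
        shape = list(self.Input.meta.shape)
        shape[-1] = len(channels)          # output only carries the selected channels
        self.Output.meta.shape = tuple(shape)

    def execute(self, slot, subindex, roi, result):
        channels = self.SelectedChannels.value
        key = [slice(a, b) for a, b in zip(roi.start, roi.stop)]
        # map each requested output channel back to its source channel in Input
        for local_c, out_c in enumerate(range(roi.start[-1], roi.stop[-1])):
            in_c = channels[out_c]
            key[-1] = slice(in_c, in_c + 1)
            result[..., local_c : local_c + 1] = self.Input[tuple(key)].wait()
        return result

    def propagateDirty(self, slot, subindex, roi):
        self.Output.setDirty(slice(None))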
# Example #15
    def propagateDirty(self, slot, subindex, roi):
        self.Output.setDirty(slice(None))

    def cleanup(self):
        self.cache.Input.disconnect()



if __name__ == '__main__':
    # assumes there is a server running at localhost
    import logging

    logging.basicConfig(level=logging.DEBUG)
    volume_url = 'http://localhost:8000/cremi'

    from lazyflow import graph
    g = graph.Graph()
    op = OpRESTfulPrecomputedChunkedVolumeReader(graph=g)
    op.BaseUrl.setValue(volume_url)
    print(f'Available scales: {op.AvailableScales.value}')
    print(f'Selected scale: {op.Scale.value}')

    # get some data
    roi = ((0, 0, 0, 0), (1, 10, 100, 100))
    data = op.Output(*roi).wait()
    import h5py
    with h5py.File('/tmp/temph5.h5', 'w') as f:
        f.create_dataset('exported', data=data)

    # get some data for the second time, check on server that it has only
    # been requested once
    data2 = op.Output(*roi).wait()
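    # In addition to checking on the server, a quick local sanity check (an assumption:
    # both requests should return identical data for a static volume) could be added here:
    import numpy
    numpy.testing.assert_array_equal(data, data2)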
    def setup_method(self, method):
        self.g = graph.Graph()

    def setup(self):
        self.g = graph.Graph()