Example #1
0
    def parse_classifier_file(self, workspace):
        """Load ilastik classifiers and feature selections into the
        image-set-list dictionary, unless they are already cached there.

        workspace - workspace whose image_set_list keys the cache dictionary.
        """
        d = self.get_dictionary(workspace.image_set_list)
        # Already parsed for this image set list - nothing to do.
        # (`k in d` instead of d.has_key(k): works on Python 2 and 3.)
        if all(k in d for k in (CLASSIFIERS_KEY, FEATURE_ITEMS_KEY)):
            return

        # Load classifier from hdf5
        fileName = str(os.path.join(self.h5_directory.get_absolute_path(),
                                    self.classifier_file_name.value))

        # Read the classifier ids, then close the file immediately:
        # if hf is not closed this leads to an error on win64 and mac os x,
        # because deserialize() reopens the same file below.
        hf = h5py.File(fileName, 'r')
        try:
            classifier_ids = list(hf['classifiers'].keys())
        finally:
            hf.close()

        classifiers = []
        for cid in classifier_ids:
            cidpath = 'classifiers/' + cid
            try:
                classifiers.append(
                    ClassifierRandomForest.deserialize(fileName, cidpath))
            except Exception:
                # Fall back to the legacy random-forest serialization format.
                classifiers.append(
                    ClassifierRandomForest.loadRFfromFile(fileName, cidpath))

        d[CLASSIFIERS_KEY] = classifiers

        # Restore user selection of feature items from hdf5;
        # try/finally guarantees the handle is closed even if
        # deserialization raises.
        featureItems = []
        f = h5py.File(fileName, 'r')
        try:
            for fgrp in f['features'].values():
                featureItems.append(FeatureBase.deserialize(fgrp))
        finally:
            f.close()
        d[FEATURE_ITEMS_KEY] = featureItems
    def load_features(self, filename=None):
        """Load the feature selections serialized in an ilastik HDF5 file.

        filename - path to the classifier HDF5 file.
                   NOTE(review): a None filename falls straight through to
                   h5py.File and raises there; presumably callers always
                   pass a real path - confirm before relying on the default.

        Stores the deserialized feature items in self.features.
        """
        featureItems = []
        hf = h5py.File(filename, "r")
        try:
            for fgrp in hf["features"].values():
                featureItems.append(FeatureBase.deserialize(fgrp))
        finally:
            # Close even if deserialize raises - an open handle causes
            # errors on win64 and mac os x elsewhere in this code base.
            hf.close()
        self.features = featureItems
Example #3
0
    def load_features(self, filename=None):
        """Read the saved feature selection out of an ilastik HDF5 file
        and store it on self.features.

        filename - path to the classifier HDF5 file.
                   NOTE(review): None is passed straight to h5py.File and
                   fails there; callers appear to always supply a path.
        """
        hf = h5py.File(filename, 'r')
        try:
            # Collect one deserialized feature per group under 'features'.
            featureItems = [FeatureBase.deserialize(fgrp)
                            for fgrp in hf['features'].values()]
        finally:
            # Always release the handle - leaving it open causes errors
            # on win64 and mac os x elsewhere in this code base.
            hf.close()
        self.features = featureItems
Example #4
0
    def parse_classifier_file(self, workspace):
        """Parse classifiers and feature selections from the HDF5 file,
        caching the result in the module-level classifier_dict keyed by
        file name and modification time.

        workspace - unused here; kept for interface compatibility.

        Returns a dict with CLASSIFIERS_KEY and FEATURE_ITEMS_KEY entries.
        """
        global classifier_dict
        # Load classifier from hdf5
        fileName = str(
            os.path.join(self.h5_directory.get_absolute_path(),
                         self.classifier_file_name.value))
        modtime = os.stat(fileName).st_mtime
        # Reuse the cached parse if the file has not been modified since.
        if fileName in classifier_dict:
            last_modtime, d = classifier_dict[fileName]
            if modtime == last_modtime:
                return d

        d = {}
        # Read the classifier ids, then close immediately: if hf is not
        # closed this leads to an error on win64 and mac os x, because
        # deserialize() reopens the same file below.
        hf = h5py.File(fileName, 'r')
        try:
            classifier_ids = list(hf['classifiers'].keys())
        finally:
            hf.close()

        classifiers = []
        for cid in classifier_ids:
            cidpath = 'classifiers/' + cid
            try:
                classifiers.append(
                    ClassifierRandomForest.deserialize(fileName, cidpath))
            except Exception:
                # Fall back to the legacy random-forest file format.
                classifiers.append(
                    ClassifierRandomForest.loadRFfromFile(fileName, cidpath))

        d[CLASSIFIERS_KEY] = classifiers

        # Restore user selection of feature items from hdf5; try/finally
        # guarantees the handle is closed even on error.
        featureItems = []
        f = h5py.File(fileName, 'r')
        try:
            for fgrp in f['features'].values():
                featureItems.append(FeatureBase.deserialize(fgrp))
        finally:
            f.close()
        d[FEATURE_ITEMS_KEY] = featureItems
        classifier_dict[fileName] = (modtime, d)
        return d
    def parse_classifier_hdf5(self, filename):
        '''Parse the classifiers out of the HDF5 file
        
        filename - name of classifier file
        
        returns a dictionary
           CLASSIFIERS_KEY - the random forest classifiers
           FEATURE_ITEMS_KEY - the features needed by the classifier
        '''
        d = {}
        # h5py here wants byte-string paths (Python 2 era code).
        if not isinstance(filename, str):
            filename = filename.encode('utf-8')
        # Read the classifier ids, then close immediately: if hf is not
        # closed this leads to an error on win64 and mac os x, because
        # deserialize() reopens the same file below.
        hf = h5py.File(filename, 'r')
        try:
            classifier_ids = list(hf['classifiers'].keys())
        finally:
            hf.close()

        classifiers = []
        for cid in classifier_ids:
            # Group names may come back as unicode (Python 2); encode so
            # the concatenated path stays a byte string.
            if isinstance(cid, unicode):
                cid = cid.encode('utf-8')
            cidpath = 'classifiers/' + cid
            try:
                classifiers.append(
                    ClassifierRandomForest.deserialize(filename, cidpath))
            except Exception:
                # Fall back to the legacy random-forest file format.
                classifiers.append(
                    ClassifierRandomForest.loadRFfromFile(filename, cidpath))

        d[CLASSIFIERS_KEY] = classifiers
        
        # Restore user selection of feature items from hdf5; try/finally
        # guarantees the handle is closed even if deserialization raises.
        featureItems = []
        f = h5py.File(filename, 'r')
        try:
            for fgrp in f['features'].values():
                featureItems.append(FeatureBase.deserialize(fgrp))
        finally:
            f.close()
        d[FEATURE_ITEMS_KEY] = featureItems
        return d
    def parse_classifier_hdf5(self, filename):
        '''Parse the classifiers out of the HDF5 file

        filename - name of classifier file

        returns a dictionary
           CLASSIFIERS_KEY - the random forest classifiers
           FEATURE_ITEMS_KEY - the features needed by the classifier
        '''
        d = {}
        # h5py here wants byte-string paths (Python 2 era code).
        if not isinstance(filename, str):
            filename = filename.encode('utf-8')
        # Grab the classifier ids and close the file right away: if hf is
        # not closed this leads to an error on win64 and mac os x, because
        # deserialize() reopens the same file below.
        hf = h5py.File(filename, 'r')
        try:
            classifier_ids = list(hf['classifiers'].keys())
        finally:
            hf.close()

        classifiers = []
        for cid in classifier_ids:
            # Group names may come back as unicode (Python 2); encode so
            # the concatenated path stays a byte string.
            if isinstance(cid, unicode):
                cid = cid.encode('utf-8')
            cidpath = 'classifiers/' + cid
            try:
                classifiers.append(
                    ClassifierRandomForest.deserialize(filename, cidpath))
            except Exception:
                # Fall back to the legacy random-forest file format.
                classifiers.append(
                    ClassifierRandomForest.loadRFfromFile(filename, cidpath))

        d[CLASSIFIERS_KEY] = classifiers

        # Restore user selection of feature items from hdf5; try/finally
        # guarantees the handle is closed even if deserialization raises.
        featureItems = []
        f = h5py.File(filename, 'r')
        try:
            for fgrp in f['features'].values():
                featureItems.append(FeatureBase.deserialize(fgrp))
        finally:
            f.close()
        d[FEATURE_ITEMS_KEY] = featureItems
        return d
Example #7
0
    def parse_classifier_file(self, workspace):
        """Parse classifiers and feature selections from the configured
        HDF5 file, with a module-level cache keyed by file name and mtime.

        workspace - unused here; kept for interface compatibility.

        Returns a dict with CLASSIFIERS_KEY and FEATURE_ITEMS_KEY entries.
        """
        global classifier_dict
        # Load classifier from hdf5
        fileName = str(os.path.join(self.h5_directory.get_absolute_path(), 
                                    self.classifier_file_name.value))
        modtime = os.stat(fileName).st_mtime
        # Serve the cached parse when the file is unchanged on disk.
        if fileName in classifier_dict:
            last_modtime, d = classifier_dict[fileName]
            if modtime == last_modtime:
                return d

        d = {}
        # Read the classifier ids and close immediately: if hf is not
        # closed this leads to an error on win64 and mac os x, because
        # deserialize() reopens the same file below.
        hf = h5py.File(fileName, 'r')
        try:
            classifier_ids = list(hf['classifiers'].keys())
        finally:
            hf.close()

        classifiers = []
        for cid in classifier_ids:
            cidpath = 'classifiers/' + cid
            try:
                classifiers.append(
                    ClassifierRandomForest.deserialize(fileName, cidpath))
            except Exception:
                # Fall back to the legacy random-forest file format.
                classifiers.append(
                    ClassifierRandomForest.loadRFfromFile(fileName, cidpath))

        d[CLASSIFIERS_KEY] = classifiers

        # Restore user selection of feature items from hdf5; try/finally
        # guarantees the handle is closed even on error.
        featureItems = []
        f = h5py.File(fileName, 'r')
        try:
            for fgrp in f['features'].values():
                featureItems.append(FeatureBase.deserialize(fgrp))
        finally:
            f.close()
        d[FEATURE_ITEMS_KEY] = featureItems
        classifier_dict[fileName] = (modtime, d)
        return d
    def parse_classifier_file(self, workspace):
        """Populate the image-set-list dictionary with the classifiers and
        feature selections from the configured HDF5 file, if not cached.

        workspace - workspace whose image_set_list keys the cache dictionary.
        """
        d = self.get_dictionary(workspace.image_set_list)
        # Already parsed - nothing to do.
        # (`k in d` instead of d.has_key(k): works on Python 2 and 3.)
        if all(k in d for k in (CLASSIFIERS_KEY, FEATURE_ITEMS_KEY)):
            return

        # Load classifier from hdf5
        fileName = str(
            os.path.join(self.h5_directory.get_absolute_path(),
                         self.classifier_file_name.value))

        # Read classifier ids, then close immediately: if hf is not closed
        # this leads to an error on win64 and mac os x, because
        # deserialize() reopens the same file below.
        hf = h5py.File(fileName, 'r')
        try:
            classifier_ids = list(hf['classifiers'].keys())
        finally:
            hf.close()

        classifiers = []
        for cid in classifier_ids:
            cidpath = 'classifiers/' + cid
            try:
                classifiers.append(
                    ClassifierRandomForest.deserialize(fileName, cidpath))
            except Exception:
                # Fall back to the legacy random-forest file format.
                classifiers.append(
                    ClassifierRandomForest.loadRFfromFile(fileName, cidpath))

        d[CLASSIFIERS_KEY] = classifiers

        # Restore user selection of feature items from hdf5; try/finally
        # guarantees the handle is closed even if deserialization raises.
        featureItems = []
        f = h5py.File(fileName, 'r')
        try:
            for fgrp in f['features'].values():
                featureItems.append(FeatureBase.deserialize(fgrp))
        finally:
            f.close()
        d[FEATURE_ITEMS_KEY] = featureItems
Example #9
0
    def _predict_image_with_ilastik(self, image_):
        """Run the configured ilastik classifier over image_ and return
        the selected class's probability map as an image.

        image_ - 2D numpy array (optionally with a trailing channel axis).
                 NOTE(review): its .shape attribute is reshaped in place,
                 which is visible to the caller - confirm callers pass a
                 throwaway array before changing this.

        Returns a ccore image holding the uint8-scaled probability map.
        Raises RuntimeError if the configured class index is out of range.
        """
        import ilastik
        from ilastik.core.dataMgr import DataMgr, DataItemImage
        from ilastik.modules.classification.core.featureMgr import FeatureMgr
        from ilastik.modules.classification.core.classificationMgr import ClassificationMgr
        from ilastik.modules.classification.core.features.featureBase import FeatureBase
        from ilastik.modules.classification.core.classifiers.classifierRandomForest import ClassifierRandomForest
        from ilastik.modules.classification.core.classificationMgr import ClassifierPredictThread
        from ilastik.core.volume import DataAccessor
        import numpy, h5py

        dataMgr = DataMgr()

        # Transform the input image to the ilastik axis convention:
        # 3D = (time, x, y, z, channel); 2D = (time, 1, x, y, channel).
        # Note: this only handles 2D images right now.
        image_.shape = (1, 1) + image_.shape

        # If the image has no channel axis yet, add a singleton dimension.
        if len(image_.shape) == 4:
            image_.shape = image_.shape + (1,)

        # Register the image with the data manager.
        di = DataItemImage('')
        di.setDataVol(DataAccessor(image_))
        dataMgr.append(di, alreadyLoaded=True)

        fileName = self.params["ilastik_classifier"]
        ilastik_class = self.params["ilastik_class_selector"]

        # Read the classifier ids and close the file immediately:
        # if hf is not closed this leads to an error on win64 and mac os x,
        # because loadRFfromFile() reopens the same file below.
        hf = h5py.File(fileName, 'r')
        try:
            classifier_ids = list(hf['classifiers'].keys())
        finally:
            hf.close()

        classifiers = []
        for cid in classifier_ids:
            cidpath = 'classifiers/' + cid
            classifiers.append(
                ClassifierRandomForest.loadRFfromFile(fileName, str(cidpath)))

        dataMgr.module["Classification"]["classificationMgr"].classifiers = classifiers

        # Restore the user's feature selection from hdf5; try/finally
        # guarantees the handle is closed even if deserialization raises.
        featureItems = []
        f = h5py.File(fileName, 'r')
        try:
            for fgrp in f['features'].values():
                featureItems.append(FeatureBase.deserialize(fgrp))
        finally:
            f.close()

        # Create the feature manager and compute the features the
        # classifier needs.
        fm = FeatureMgr(dataMgr, featureItems)
        fm.prepareCompute(dataMgr)
        fm.triggerCompute()
        fm.joinCompute(dataMgr)

        # Predict with the loaded classifier.
        classificationPredict = ClassifierPredictThread(dataMgr)
        classificationPredict.start()
        classificationPredict.wait()

        if ilastik_class >= classificationPredict._prediction[0].shape[-1]:
            raise RuntimeError('ilastik output class not valid...')

        # Select the requested class's probability map and scale to uint8.
        probMap = (classificationPredict._prediction[0][0, 0, :, :, ilastik_class] * 255).astype(numpy.uint8)
        img_out = ccore.numpy_to_image(probMap, True)
        return img_out