Example #1
0
    def parse_classifier_file(self, workspace):
        d = self.get_dictionary(workspace.image_set_list)
        if all(k in d for k in (CLASSIFIERS_KEY, FEATURE_ITEMS_KEY)):
            return
        
        # Load classifier from hdf5
        fileName = str(os.path.join(self.h5_directory.get_absolute_path(), 
                                    self.classifier_file_name.value))
        
        hf = h5py.File(fileName,'r')
        temp = hf['classifiers'].keys()
        # If hf is not closed this leads to an error in win64 and mac os x
        hf.close()
        del hf
        
        classifiers = []
        for cid in temp:
            cidpath = 'classifiers/' + cid
            try:
                classifiers.append(ClassifierRandomForest.deserialize(fileName, cidpath))
            except Exception:
                # Fall back to the alternate loader if deserialization fails
                classifiers.append(ClassifierRandomForest.loadRFfromFile(fileName, cidpath))

        d[CLASSIFIERS_KEY] = classifiers
        
        # Restore user selection of feature items from hdf5
        featureItems = []
        f = h5py.File(fileName,'r')
        for fgrp in f['features'].values():
            featureItems.append(FeatureBase.deserialize(fgrp))
        f.close()
        del f
        d[FEATURE_ITEMS_KEY] = featureItems
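
Both the classifiers and the saved feature selection live in a single HDF5 file, read from two top-level groups: 'classifiers' (one subgroup per serialized random forest) and 'features' (one subgroup per feature item). A minimal inspection sketch, assuming only that h5py is installed and that 'classifier.h5' is a placeholder path to such a file:

    import h5py

    # 'classifier.h5' is a hypothetical file; the group names 'classifiers'
    # and 'features' are the ones the snippet above reads.
    with h5py.File('classifier.h5', 'r') as hf:
        classifier_ids = list(hf['classifiers'].keys())  # one id per stored forest
        feature_names = list(hf['features'].keys())      # one entry per feature item
    print(classifier_ids)
    print(feature_names)
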
Example #2
0
    def parse_classifier_file(self, workspace):
        global classifier_dict
        # Load classifier from hdf5
        fileName = str(
            os.path.join(self.h5_directory.get_absolute_path(),
                         self.classifier_file_name.value))
        modtime = os.stat(fileName).st_mtime
        if fileName in classifier_dict:
            last_modtime, d = classifier_dict[fileName]
            if modtime == last_modtime:
                return d

        d = {}
        hf = h5py.File(fileName, 'r')
        temp = hf['classifiers'].keys()
        # If hf is not closed this leads to an error in win64 and mac os x
        hf.close()
        del hf

        classifiers = []
        for cid in temp:
            cidpath = 'classifiers/' + cid
            try:
                classifiers.append(
                    ClassifierRandomForest.deserialize(fileName, cidpath))
            except Exception:
                # Fall back to the alternate loader if deserialization fails
                classifiers.append(
                    ClassifierRandomForest.loadRFfromFile(fileName, cidpath))

        d[CLASSIFIERS_KEY] = classifiers

        # Restore user selection of feature items from hdf5
        featureItems = []
        f = h5py.File(fileName, 'r')
        for fgrp in f['features'].values():
            featureItems.append(FeatureBase.deserialize(fgrp))
        f.close()
        del f
        d[FEATURE_ITEMS_KEY] = featureItems
        classifier_dict[fileName] = (modtime, d)
        return d
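
This variant caches the parsed result in a module-level dictionary keyed by file path and invalidated by the file's modification time, so repeated calls only re-read the HDF5 file when it changes on disk. A standalone sketch of that pattern, with _cache and parse_fn as illustrative names (neither comes from the snippet):

    import os

    # path -> (st_mtime at parse time, parsed result); plays the role that
    # classifier_dict plays in the method above.
    _cache = {}

    def cached_parse(path, parse_fn):
        mtime = os.stat(path).st_mtime
        hit = _cache.get(path)
        if hit is not None and hit[0] == mtime:
            return hit[1]            # file unchanged since the last parse: reuse it
        result = parse_fn(path)      # re-parse and refresh the cache entry
        _cache[path] = (mtime, result)
        return result

Comparing modification times is cheap but coarse: a rewrite that preserves the timestamp would still be served from the cache.
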
    def parse_classifier_hdf5(self, filename):
        '''Parse the classifiers out of the HDF5 file
        
        filename - name of classifier file
        
        returns a dictionary
           CLASSIFIERS_KEY - the random forest classifiers
           FEATURE_ITEMS_KEY - the features needed by the classifier
        '''
        d = {}
        # Convert unicode filenames to byte strings (Python 2)
        if not isinstance(filename, str):
            filename = filename.encode('utf-8')
        hf = h5py.File(filename,'r')
        temp = hf['classifiers'].keys()
        # If hf is not closed this leads to an error in win64 and mac os x
        hf.close()
        del hf
        
        classifiers = []
        for cid in temp:
            if isinstance(cid, unicode):
                cid = cid.encode('utf-8')
            cidpath = 'classifiers/' + cid
            try:
                classifiers.append(
                    ClassifierRandomForest.deserialize(filename, cidpath))
            except Exception:
                # Fall back to the alternate loader if deserialization fails
                classifiers.append(
                    ClassifierRandomForest.loadRFfromFile(filename, cidpath))

        d[CLASSIFIERS_KEY] = classifiers
        
        # Restore user selection of feature items from hdf5
        featureItems = []
        f = h5py.File(filename,'r')
        for fgrp in f['features'].values():
            featureItems.append(FeatureBase.deserialize(fgrp))
        d[FEATURE_ITEMS_KEY] = featureItems
        f.close()
        del f
        return d
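
Each classifier id is loaded by trying ClassifierRandomForest.deserialize first and falling back to ClassifierRandomForest.loadRFfromFile if it raises. A generic sketch of that fallback, with primary and fallback as placeholder callables (only the two loader names come from the snippets):

    # Try one loader, fall back to the other on any exception; primary and
    # fallback stand in for the two ClassifierRandomForest loaders.
    def load_with_fallback(primary, fallback, filename, path):
        try:
            return primary(filename, path)
        except Exception:
            return fallback(filename, path)
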
Example #5
0
    def parse_classifier_file(self, workspace):
        global classifier_dict
        # Load classifier from hdf5
        fileName = str(os.path.join(self.h5_directory.get_absolute_path(), 
                                    self.classifier_file_name.value))
        modtime = os.stat(fileName).st_mtime
        if fileName in classifier_dict:
            last_modtime, d = classifier_dict[fileName]
            if modtime == last_modtime:
                return d
        
        d = {}
        hf = h5py.File(fileName,'r')
        temp = hf['classifiers'].keys()
        # If hf is not closed this leads to an error in win64 and mac os x
        hf.close()
        del hf
        
        classifiers = []
        for cid in temp:
            cidpath = 'classifiers/' + cid
            try:
                classifiers.append(ClassifierRandomForest.deserialize(fileName, cidpath))
            except Exception:
                # Fall back to the alternate loader if deserialization fails
                classifiers.append(ClassifierRandomForest.loadRFfromFile(fileName, cidpath))

        d[CLASSIFIERS_KEY] = classifiers
        
        # Restore user selection of feature items from hdf5
        featureItems = []
        f = h5py.File(fileName,'r')
        for fgrp in f['features'].values():
            featureItems.append(FeatureBase.deserialize(fgrp))
        f.close()
        del f
        d[FEATURE_ITEMS_KEY] = featureItems
        classifier_dict[fileName] = (modtime, d)
        return d
    def parse_classifier_file(self, workspace):
        d = self.get_dictionary(workspace.image_set_list)
        if all(k in d for k in (CLASSIFIERS_KEY, FEATURE_ITEMS_KEY)):
            return

        # Load classifier from hdf5
        fileName = str(
            os.path.join(self.h5_directory.get_absolute_path(),
                         self.classifier_file_name.value))

        hf = h5py.File(fileName, 'r')
        temp = hf['classifiers'].keys()
        # If hf is not closed this leads to an error in win64 and mac os x
        hf.close()
        del hf

        classifiers = []
        for cid in temp:
            cidpath = 'classifiers/' + cid
            try:
                classifiers.append(
                    ClassifierRandomForest.deserialize(fileName, cidpath))
            except Exception:
                # Fall back to the alternate loader if deserialization fails
                classifiers.append(
                    ClassifierRandomForest.loadRFfromFile(fileName, cidpath))

        d[CLASSIFIERS_KEY] = classifiers

        # Restore user selection of feature items from hdf5
        featureItems = []
        f = h5py.File(fileName, 'r')
        for fgrp in f['features'].values():
            featureItems.append(FeatureBase.deserialize(fgrp))
        f.close()
        del f
        d[FEATURE_ITEMS_KEY] = featureItems
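
Example #1 and the last snippet above take a different caching route: the parsed classifiers and feature items go into a dictionary shared via the workspace (get_dictionary), and parsing is skipped once both keys are present. A small sketch of that guard, with shared, required_keys and parse_into as illustrative names:

    # Populate a shared dict once; later calls return immediately.
    def ensure_parsed(shared, required_keys, parse_into):
        if all(k in shared for k in required_keys):
            return               # both keys already present: nothing to do
        parse_into(shared)       # fill the shared dictionary in place
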