def __init__(self, normalizer: BaseNormalizer = None, randomSeed: int = None, criterion: str = "Gini Impurity"):
    """
    Classifier initialization.

    :param normalizer: Normalizer used for input data. If None, then normalization/scaling is omitted.
    :type normalizer: None | BaseNormalizer
    :param randomSeed: If not None, then a fixed seed is used.
    :type randomSeed: int
    :param criterion: The function to measure the quality of a split.
    :type criterion: str
    """

    self._normalizer = PluginAttribute(
        "Normalize",
        PluginAttribute.PluginAttributeType.SELECTABLE_PLUGIN,
        None,
        [None, NormalizerPlugin, MinMaxScalerPlugin, StandardScalerPlugin, RobustScalerPlugin])
    self._normalizer.value = normalizer

    self._randomSeed = PluginAttribute(
        "Random seed",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(couldBeNone=True))
    self._randomSeed.value = randomSeed

    self._criterion = PluginAttribute(
        "Split criterion",
        PluginAttribute.PluginAttributeType.SELECTABLE,
        None,
        ["Gini Impurity", "Information Gain"])
    self._criterion.value = criterion
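# Illustrative sketch only, not part of the plugin code: assuming the classifier is
# realized with scikit-learn's DecisionTreeClassifier (an assumption), the GUI labels
# above could map to sklearn's criterion names roughly like this. The helper name is
# hypothetical and only documents the parameter mapping.
from sklearn.tree import DecisionTreeClassifier

_CRITERION_MAP = {"Gini Impurity": "gini", "Information Gain": "entropy"}

def _buildDecisionTree(criterion="Gini Impurity", randomSeed=None):
    return DecisionTreeClassifier(criterion=_CRITERION_MAP[criterion], random_state=randomSeed)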
def __init__(self, normalizer: BaseNormalizer = None, typeV: str = "gaussian"):
    """
    Classifier initialization.

    :param normalizer: Normalizer used for input data. If None, then normalization/scaling is omitted.
    :type normalizer: None | BaseNormalizer
    :param typeV: Type of naive Bayes.
        multinomial - Discrete non-negative values, such as counts.
        gaussian - Continuous values. The likelihood of the features is assumed to be Gaussian.
    :type typeV: str
    """

    self._normalizer = PluginAttribute(
        "Normalize",
        PluginAttribute.PluginAttributeType.SELECTABLE_PLUGIN,
        None,
        [None, NormalizerPlugin, MinMaxScalerPlugin, StandardScalerPlugin, RobustScalerPlugin])
    self._normalizer.value = normalizer

    self._type = PluginAttribute(
        "Type",
        PluginAttribute.PluginAttributeType.SELECTABLE,
        str,
        ["gaussian", "multinomial"])
    self._type.value = typeV
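# Illustrative sketch only: assuming scikit-learn backs this plugin (an assumption),
# the typeV selection would correspond to these estimators. Hypothetical helper.
from sklearn.naive_bayes import GaussianNB, MultinomialNB

def _buildNaiveBayes(typeV="gaussian"):
    # gaussian -> continuous features, multinomial -> discrete non-negative counts
    return GaussianNB() if typeV == "gaussian" else MultinomialNB()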
def __init__(self, nonNegative: bool = False, nFeatures: int = 100, norm="l2"):
    """
    Initialize Hashing features extractor.

    :param nonNegative: Generate only non-negative values (True).
    :type nonNegative: bool
    :param nFeatures: Number of features.
    :type nFeatures: int
    :param norm: Type of normalization.
    :type norm: str | None
    """

    self._nonNegativ = PluginAttribute(
        "Non negative values",
        PluginAttribute.PluginAttributeType.CHECKABLE,
        bool)
    self._nonNegativ.value = nonNegative

    self._nFeatures = PluginAttribute(
        "Number of features",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self._nFeatures.value = nFeatures

    self._norm = PluginAttribute(
        "Normalization",
        PluginAttribute.PluginAttributeType.SELECTABLE,
        None,
        [None, "l1", "l2"])
    self._norm.value = norm

    self._vectorizer = None
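# Illustrative sketch only: assuming the extractor wraps scikit-learn's
# HashingVectorizer (an assumption), the attributes above would roughly translate
# as follows. Hypothetical helper; alternate_sign=False keeps the hashed values
# non-negative, which approximates the nonNegative option.
from sklearn.feature_extraction.text import HashingVectorizer

def _buildHashingVectorizer(nonNegative=False, nFeatures=100, norm="l2"):
    return HashingVectorizer(n_features=nFeatures, norm=norm, alternate_sign=not nonNegative)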
def __init__(self, maxFeatures: int = None, caseSensitive: bool = False, norm="l2"):
    """
    Feature extractor initialization.

    :param maxFeatures: Limit to the maximum number of features. None means unlimited.
    :type maxFeatures: None | int
    :param caseSensitive: True means that we want to be case sensitive.
    :type caseSensitive: bool
    :param norm: Type of normalization.
    :type norm: str | None
    """

    self._maxFeatures = PluginAttribute(
        "Max number of features",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1, couldBeNone=True))
    self._maxFeatures.value = maxFeatures

    self._caseSensitive = PluginAttribute(
        "Case sensitive",
        PluginAttribute.PluginAttributeType.CHECKABLE,
        bool)
    self._caseSensitive.value = caseSensitive

    self._norm = PluginAttribute(
        "Normalization",
        PluginAttribute.PluginAttributeType.SELECTABLE,
        None,
        [None, "l1", "l2"])
    self._norm.value = norm
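# Illustrative sketch only: assuming a scikit-learn TfidfVectorizer backs this
# extractor (an assumption), the attributes would map like this. Hypothetical helper.
from sklearn.feature_extraction.text import TfidfVectorizer

def _buildTfIdf(maxFeatures=None, caseSensitive=False, norm="l2"):
    # lowercase=True folds case, i.e. the opposite of being case sensitive
    return TfidfVectorizer(max_features=maxFeatures, lowercase=not caseSensitive, norm=norm)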
def __init__(self, neurons: int = 1, activation: str = 'relu'):
    """
    Layer initialization.

    :param neurons: Number of neurons in the layer.
    :type neurons: int
    :param activation: Activation function.
    :type activation: str
    """

    self.neurons = PluginAttribute(
        "Neurons",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self.neurons.value = neurons

    self.activation = PluginAttribute(
        "Activation function",
        PluginAttribute.PluginAttributeType.SELECTABLE,
        str,
        ["relu", "sigmoid", "softmax"])
    self.activation.value = activation
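# Illustrative sketch only: assuming the hidden layers end up as Keras Dense layers
# (an assumption), a Layer instance would translate roughly like this. Hypothetical helper.
from tensorflow.keras.layers import Dense

def _buildDenseLayer(neurons=1, activation="relu"):
    return Dense(units=neurons, activation=activation)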
def __init__(self, normalizer: BaseNormalizer = None, neighbors: int = 3):
    """
    Classifier initialization.

    :param normalizer: Normalizer used for input data. If None, then normalization/scaling is omitted.
    :type normalizer: None | BaseNormalizer
    :param neighbors: Number of neighbors (k).
    :type neighbors: int
    """

    self._normalizer = PluginAttribute(
        "Normalize",
        PluginAttribute.PluginAttributeType.SELECTABLE_PLUGIN,
        None,
        [None, NormalizerPlugin, MinMaxScalerPlugin, StandardScalerPlugin, RobustScalerPlugin])
    self._normalizer.value = normalizer

    self._neighbors = PluginAttribute(
        "Neighbors",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self._neighbors.value = neighbors
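# Illustrative sketch only: assuming scikit-learn backs this plugin (an assumption),
# the number of neighbors would simply be forwarded to KNeighborsClassifier.
# Hypothetical helper.
from sklearn.neighbors import KNeighborsClassifier

def _buildKNN(neighbors=3):
    return KNeighborsClassifier(n_neighbors=neighbors)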
def __init__(self, orientationsBins: int = 9, pixelsPerCellHorizontal: int = 8,
             pixelsPerCellVertical: int = 8, cellsPerBlockHorizontal: int = 3,
             cellsPerBlockVertical: int = 3, blockNorm: str = "L2-Hys"):
    """
    Feature extractor initialization.

    :param orientationsBins: Number of orientation bins.
    :type orientationsBins: int
    :param pixelsPerCellHorizontal: Width (in pixels) of a cell.
    :type pixelsPerCellHorizontal: int
    :param pixelsPerCellVertical: Height (in pixels) of a cell.
    :type pixelsPerCellVertical: int
    :param cellsPerBlockHorizontal: Number of cells in each block (horizontal).
    :type cellsPerBlockHorizontal: int
    :param cellsPerBlockVertical: Number of cells in each block (vertical).
    :type cellsPerBlockVertical: int
    :param blockNorm: Block normalization method.
    :type blockNorm: str
    """

    self._orientationsBins = PluginAttribute(
        "Number of orientation bins",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self._orientationsBins.value = orientationsBins

    self._pixelsPerCellHorizontal = PluginAttribute(
        "Width of a cell [px]",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self._pixelsPerCellHorizontal.value = pixelsPerCellHorizontal

    self._pixelsPerCellVertical = PluginAttribute(
        "Height of a cell [px]",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self._pixelsPerCellVertical.value = pixelsPerCellVertical

    self._cellsPerBlockHorizontal = PluginAttribute(
        "Number of cells in each block (horizontal)",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self._cellsPerBlockHorizontal.value = cellsPerBlockHorizontal

    self._cellsPerBlockVertical = PluginAttribute(
        "Number of cells in each block (vertical)",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self._cellsPerBlockVertical.value = cellsPerBlockVertical

    self._blockNorm = PluginAttribute(
        "Normalization",
        PluginAttribute.PluginAttributeType.SELECTABLE,
        None,
        ["L1", "L1-sqrt", "L2", "L2-Hys"])
    self._blockNorm.value = blockNorm
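# Illustrative sketch only: assuming the descriptor is computed with
# skimage.feature.hog (an assumption), the attributes above would be passed roughly
# like this. Hypothetical helper; `image` is a single grayscale image, and the
# (rows, cols) tuples are built as (vertical, horizontal).
from skimage.feature import hog

def _extractHOG(image, orientationsBins=9, pixelsPerCellHorizontal=8, pixelsPerCellVertical=8,
                cellsPerBlockHorizontal=3, cellsPerBlockVertical=3, blockNorm="L2-Hys"):
    return hog(image,
               orientations=orientationsBins,
               pixels_per_cell=(pixelsPerCellVertical, pixelsPerCellHorizontal),
               cells_per_block=(cellsPerBlockVertical, cellsPerBlockHorizontal),
               block_norm=blockNorm)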
def __init__(self, normalizer: BaseNormalizer = None, randomSeed: int = None, epochs: int = 10,
             batchSize: int = 32, learningRate: float = 0.001, gpu: bool = True,
             outLactFun: str = "softmax", log: bool = True):
    """
    Classifier initialization.

    :param normalizer: Normalizer used for input data. If None, the input is scaled
        to [-1, 1] with MinMaxScalerPlugin by default.
    :type normalizer: None | BaseNormalizer
    :param randomSeed: If not None, then a fixed seed is used.
    :type randomSeed: int
    :param epochs: Number of training epochs.
    :type epochs: int
    :param batchSize: Number of samples processed before the weights are updated.
    :type batchSize: int
    :param learningRate: How big a step we take when we learn (in the direction of the gradient).
    :type learningRate: float
    :param gpu: Should the GPU be used?
    :type gpu: bool
    :param outLactFun: Activation function for the output layer.
    :type outLactFun: str
    :param log: Should training progress be logged?
    :type log: bool
    """

    if normalizer is None:
        # Neural networks work best with scaled input, so scale to [-1, 1] by default.
        normalizer = MinMaxScalerPlugin(-1, 1)

    self._normalizer = PluginAttribute(
        "Normalize",
        PluginAttribute.PluginAttributeType.SELECTABLE_PLUGIN,
        None,
        [None, NormalizerPlugin, MinMaxScalerPlugin, StandardScalerPlugin, RobustScalerPlugin])
    self._normalizer.value = normalizer

    self._randomSeed = PluginAttribute(
        "Random seed",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(couldBeNone=True))
    self._randomSeed.value = randomSeed

    self._epochs = PluginAttribute(
        "Epochs",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self._epochs.value = epochs

    self._batchSize = PluginAttribute(
        "Batch size",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self._batchSize.value = batchSize

    self._learningRate = PluginAttribute(
        "Learning rate",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeFloatChecker())
    self._learningRate.value = learningRate

    self._gpu = PluginAttribute(
        "GPU",
        PluginAttribute.PluginAttributeType.CHECKABLE,
        bool)
    self._gpu.value = gpu

    self._outLactFun = PluginAttribute(
        "Output layer activation function",
        PluginAttribute.PluginAttributeType.SELECTABLE,
        str,
        ["relu", "sigmoid", "softmax"])
    self._outLactFun.value = outLactFun

    self._log = PluginAttribute(
        "Log epoch",
        PluginAttribute.PluginAttributeType.CHECKABLE,
        bool)
    self._log.value = log

    self._hiddenLayers = PluginAttribute(
        "Hidden layers",
        PluginAttribute.PluginAttributeType.GROUP_PLUGINS,
        Layer)
    self._hiddenLayers.groupItemLabel = "Hidden layer {}"

    # Remembers the initial CUDA_VISIBLE_DEVICES state so it can be restored
    # when the GPU is switched off.
    self._CUDA_VISIBLE_DEVICES_CACHED = None
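# Illustrative sketch only: assuming the network is built with Keras (an assumption),
# the attributes above would drive model construction roughly like this. Hypothetical
# helper; `hiddenLayers` holds Layer plugin instances and `numClasses` is the number
# of target classes, neither of which appears in the original code above.
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import Adam

def _buildMLP(inputDim, numClasses, hiddenLayers, outLactFun="softmax", learningRate=0.001):
    model = Sequential()
    for i, layer in enumerate(hiddenLayers):
        if i == 0:
            # first hidden layer also declares the input dimension
            model.add(Dense(layer.neurons.value, activation=layer.activation.value,
                            input_dim=inputDim))
        else:
            model.add(Dense(layer.neurons.value, activation=layer.activation.value))
    model.add(Dense(numClasses, activation=outLactFun))  # output layer
    model.compile(optimizer=Adam(learning_rate=learningRate),
                  loss="categorical_crossentropy", metrics=["accuracy"])
    return model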
def __init__(self, normalizer: BaseNormalizer = None, generations: int = 100, stopAccuracy: float = None,
             population: int = 10, selectionMethod: Selector = "RANK", randomSeed: int = None,
             maxCrossovers: int = 1, maxMutations: int = 5, maxStartSlots=2, crossoverProb: float = 0.75,
             testSetSize: float = 1, changeTestSet: bool = False, logGenFitness: bool = True):
    """
    Classifier initialization.

    :param normalizer: Normalizer used for input data. If None, then normalization/scaling is omitted.
    :type normalizer: None | BaseNormalizer
    :param generations: Maximum number of generations.
    :type generations: int
    :param stopAccuracy: Stop the evolution when the accuracy reaches this value.
    :type stopAccuracy: None | float
    :param population: Population size.
    :type population: int
    :param selectionMethod: Selection method for the evolution.
    :type selectionMethod: Selector
    :param randomSeed: If not None, then a fixed seed is used.
    :type randomSeed: int
    :param maxCrossovers: Maximum number of crossovers when creating a generation.
    :type maxCrossovers: int
    :param maxMutations: Maximum number of changed genes in one mutation.
    :type maxMutations: int
    :param maxStartSlots: Maximum number of start slots (the minimum is always 1).
    :type maxStartSlots: int
    :param crossoverProb: Probability of crossover between two selected individuals.
        If the random draw says no crossover, then one of the parents is chosen at random
        and its chromosome is used.
    :type crossoverProb: float
    :param testSetSize: Size of the test set that is used for fitness score calculation.
    :type testSetSize: float
    :param changeTestSet: Change the test set for every generation.
    :type changeTestSet: bool
    :param logGenFitness: Log generation fitness.
    :type logGenFitness: bool
    """

    self._normalizer = PluginAttribute(
        "Normalize",
        PluginAttribute.PluginAttributeType.SELECTABLE_PLUGIN,
        None,
        [None, NormalizerPlugin, MinMaxScalerPlugin, StandardScalerPlugin, RobustScalerPlugin])
    self._normalizer.value = normalizer

    self._generations = PluginAttribute(
        "Number of generations",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=0))
    self._generations.value = generations

    self._stopAccuracy = PluginAttribute(
        "Stop accuracy",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeFloatChecker(minV=0.0, maxV=1.0, couldBeNone=True))
    self._stopAccuracy.value = stopAccuracy

    self._population = PluginAttribute(
        "Population size",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self._population.value = population

    self._selectionMethod = PluginAttribute(
        "Selection method",
        PluginAttribute.PluginAttributeType.SELECTABLE,
        None,
        list(self.SELECTION_METHODS.keys()))
    self._selectionMethod.value = selectionMethod

    self._randomSeed = PluginAttribute(
        "Random seed",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(couldBeNone=True))
    self._randomSeed.value = randomSeed

    self._maxCrossovers = PluginAttribute(
        "Max crossovers in generation",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=1))
    self._maxCrossovers.value = maxCrossovers

    self._maxMutations = PluginAttribute(
        "Max mutations",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=0))
    self._maxMutations.value = maxMutations

    self._maxStartSlots = PluginAttribute(
        "Max start slots",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeIntChecker(minV=2))
    self._maxStartSlots.value = maxStartSlots

    self._crossoverProb = PluginAttribute(
        "Crossover probability",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeFloatChecker(minV=0.0, maxV=1.0))
    self._crossoverProb.value = crossoverProb

    self._testSetSize = PluginAttribute(
        "Test set size",
        PluginAttribute.PluginAttributeType.VALUE,
        PluginAttributeFloatChecker(minV=0.0, maxV=1.0))
    self._testSetSize.value = testSetSize

    self._changeTestSet = PluginAttribute(
        "Change test set for each generation",
        PluginAttribute.PluginAttributeType.CHECKABLE,
        bool)
    self._changeTestSet.value = changeTestSet

    self._logGenFitness = PluginAttribute(
        "Log generation fitness",
        PluginAttribute.PluginAttributeType.CHECKABLE,
        bool)
    self._logGenFitness.value = logGenFitness

    self._evolvedCls = None  # the evolved classifier will be stored here
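# Illustrative sketch only: the crossoverProb and maxCrossovers attributes describe
# the behavior stated in the docstring above; one possible realization is shown here.
# Hypothetical helper working on plain lists as chromosomes, not the project's actual
# individual representation.
import random

def _mate(parentA, parentB, crossoverProb=0.75, maxCrossovers=1):
    if random.random() >= crossoverProb:
        # no crossover: the child is a copy of one randomly chosen parent's chromosome
        return list(random.choice((parentA, parentB)))
    child = list(parentA)
    for _ in range(random.randint(1, maxCrossovers)):
        point = random.randrange(1, len(child))
        child[point:] = parentB[point:]  # single-point crossover at a random position
    return child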