def __init__(self, X, Y, parameters, conveyor):
    """Build the PLSR estimator wrapper on top of BaseEstimator."""
    # Delegate the common setup to the parent class; abort on failure
    try:
        BaseEstimator.__init__(self, X, Y, parameters, conveyor)
        LOG.debug('Initialize BaseEstimator parent class')
    except Exception as e:
        err = f'Error initializing BaseEstimator parent class with exception: {e}'
        LOG.error(err)
        self.conveyor.setError(err)
        return

    # Pull the PLSR-specific settings from the parameter store
    self.estimator_parameters = self.param.getDict('PLSR_parameters')

    # Scale is hard-coded to False so external scalers can be used instead
    self.estimator_parameters['scale'] = False
    self.name = "PLSR"

    # PLSR is a regression method: refuse non-quantitative endpoints
    if not self.param.getVal('quantitative'):
        LOG.error('PLSR only applies to quantitative data')
        self.conveyor.setError('PLSR only applies to quantitative data')
        return
def __init__(self, X, Y, parameters, conveyor):
    """Instantiate the SVM wrapper, selecting SVM-R or SVM-C settings."""
    # Delegate the common setup to the parent class; abort on failure
    try:
        BaseEstimator.__init__(self, X, Y, parameters, conveyor)
        LOG.debug('Initialize BaseEstimator parent class')
    except Exception as e:
        err = f'Error initializing BaseEstimator parent class with exception: {e}'
        LOG.error(err)
        self.conveyor.setError(err)
        return

    # SVM-specific settings and the search space for tuning
    self.estimator_parameters = self.param.getDict('SVM_parameters')
    self.tune_parameters = self.param.getDict('SVM_optimize')

    if self.param.getVal('quantitative'):
        # Regression branch: discard SVC-only settings
        self.name = "SVM-R"
        for key in ("class_weight", "probability",
                    "decision_function_shape", "random_state"):
            self.estimator_parameters.pop(key, None)
        for key in ("class_weight", "random_state", "probability"):
            self.tune_parameters.pop(key, None)
    else:
        # Classification branch: discard SVR-only settings
        self.estimator_parameters.pop("epsilon", None)
        self.name = "SVM-C"
def __init__(self, X, Y, parameters, conveyor):
    """Set up the XGBoost wrapper (regressor or classifier)."""
    # Delegate the common setup to the parent class; abort on failure
    try:
        BaseEstimator.__init__(self, X, Y, parameters, conveyor)
        LOG.debug('Initialize BaseEstimator parent class')
    except Exception as e:
        err = f'Error initializing BaseEstimator parent class with exception: {e}'
        self.conveyor.setError(err)
        LOG.error(err)
        return

    # XGBoost-specific settings and the search space for tuning
    self.estimator_parameters = self.param.getDict('XGBOOST_parameters')
    self.tune_parameters = self.param.getDict('XGBOOST_optimize')

    # Choose the learning objective from the endpoint type
    if self.param.getVal('quantitative'):
        self.estimator_parameters['objective'] = 'reg:squarederror'
        self.name = "XGB-Regressor"
    else:
        self.estimator_parameters['objective'] = 'binary:logistic'
        self.name = "XGB-Classifier"

    # An explicit missing-value placeholder is required: otherwise XGBoost
    # returns 'nan', which cannot be serialised to JSON and breaks
    # downstream processing
    self.estimator_parameters['missing'] = -99.99999
def __init__(self, X, Y, parameters, conveyor):
    """Set up the Random Forest wrapper (RF-R or RF-C)."""
    # Initialize parent class; abort on failure
    try:
        BaseEstimator.__init__(self, X, Y, parameters, conveyor)
        LOG.debug('Initialize BaseEstimator parent class')
    except Exception as e:
        self.conveyor.setError(
            f'Error initializing BaseEstimator parent class with exception: {e}'
        )
        LOG.error(
            f'Error initializing BaseEstimator parent class with exception: {e}'
        )
        return

    # Load estimator parameters
    self.estimator_parameters = self.param.getDict('RF_parameters')
    # Load tune parameters
    self.tune_parameters = self.param.getDict('RF_optimize')

    if self.param.getVal('quantitative'):
        self.name = "RF-R"
        # 'class_weight' only applies to classifiers; drop it. Using
        # pop() with a default avoids a KeyError when the key is absent
        # (consistent with the SVM wrapper's safe pop usage).
        self.tune_parameters.pop("class_weight", None)
        self.estimator_parameters.pop("class_weight", None)
    else:
        self.name = "RF-C"
def __init__(self, X, Y, parameters, conveyor):
    """Set up the PLS-DA wrapper: parameters, decision threshold and,
    for confidential models, an empty estimator."""
    # Initialize parent class; abort on failure
    try:
        BaseEstimator.__init__(self, X, Y, parameters, conveyor)
        LOG.debug('Initialize BaseEstimator parent class')
    except Exception as e:
        LOG.error(f'Error initializing BaseEstimator parent class with exception: {e}')
        self.conveyor.setError(f'Error initializing BaseEstimator parent class with exception: {e}')
        return

    # Load estimator parameters
    self.estimator_parameters = self.param.getDict('PLSDA_parameters')

    # Solves back-compatibility issue: older parameter files carried an
    # 'optimize' entry which the estimator does not accept
    if 'optimize' in self.estimator_parameters:
        self.estimator_parameters.pop("optimize")

    # Scale is hard-coded to False for making use of external scalers
    self.estimator_parameters['scale'] = False
    self.name = "PLSDA"

    # Class-assignment threshold; default to 0.5 when not configured
    if 'threshold' in self.estimator_parameters:
        self.threshold = self.estimator_parameters['threshold']
    else:
        self.threshold = 0.5

    # PLS-DA is a classification method: refuse quantitative endpoints.
    # Log the error as well, for consistency with the other estimators.
    if self.param.getVal('quantitative'):
        LOG.error('PLSDA only applies to qualitative data')
        self.conveyor.setError('PLSDA only applies to qualitative data')
        return

    # For confidential models, create an empty estimator
    if self.param.getVal('confidential'):
        self.estimator = PLS_da(**self.estimator_parameters)
def __init__(self, X, Y, parameters, conveyor):
    """Set up the PLS-DA wrapper (qualitative, non-conformal only)."""
    # Initialize parent class; abort on failure
    try:
        BaseEstimator.__init__(self, X, Y, parameters, conveyor)
        LOG.debug('Initialize BaseEstimator parent class')
    except Exception as e:
        LOG.error(
            f'Error initializing BaseEstimator parent class with exception: {e}'
        )
        self.conveyor.setError(
            f'Error initializing BaseEstimator parent class with exception: {e}'
        )
        return

    # Load estimator parameters
    self.estimator_parameters = self.param.getDict('PLSDA_parameters')

    # Scale is hard-coded to False for making use of external scalers
    self.estimator_parameters['scale'] = False
    self.name = "PLSDA"

    # PLS-DA is a classification method: refuse quantitative endpoints
    if self.param.getVal('quantitative'):
        LOG.error('PLSDA only applies to qualitative data')
        self.conveyor.setError('PLSDA only applies to qualitative data')
        return

    # Fixed message typo: 'no implemented' -> 'not implemented'
    if self.param.getVal('conformal'):
        LOG.error('Conformal prediction not implemented in PLSDA yet')
        self.conveyor.setError(
            'Conformal prediction not implemented in PLSDA yet')
        return
def __init__(self, X, Y, parameters, conveyor):
    """Thin wrapper: initialize BaseEstimator and the method name.

    Unlike the concrete estimators, this one re-raises initialization
    failures instead of flagging them on the conveyor.
    """
    try:
        BaseEstimator.__init__(self, X, Y, parameters, conveyor)
        LOG.debug('Initialize BaseEstimator parent class')
    except Exception:
        # Fixed: the two f-string fragments lacked a separating space
        # and produced 'parentclass' in the log output
        LOG.error('Error initializing BaseEstimator parent '
                  'class with exception')
        # Bare raise preserves the original traceback
        raise
    self.method_name = ''
def __init__(self, X, Y, parameters, conveyor):
    """Set up the Gaussian Naive Bayes wrapper (qualitative data only)."""
    # Delegate the common setup to the parent class; abort on failure
    try:
        BaseEstimator.__init__(self, X, Y, parameters, conveyor)
        LOG.debug('Initializing BaseEstimator parent class')
    except Exception as e:
        err = f'Error initializing BaseEstimator parent class with exception: {e}'
        self.conveyor.setError(err)
        LOG.error(err)
        return

    # GNB-specific settings from the parameter store
    self.estimator_parameters = self.param.getDict('GNB_parameters')

    if self.param.getVal('quantitative'):
        # GNB is a classifier: flag the endpoint mismatch
        self.conveyor.setError('GNB only applies to qualitative data')
        LOG.error('GNB only applies to qualitative data')
    else:
        self.name = "GNB-Classifier"
def __init__(self, X, Y, parameters, conveyor):
    """Set up the Gaussian Naive Bayes wrapper, assembling the optional
    class priors and var_smoothing from the parameter store."""
    # Initialize parent class; abort on failure
    try:
        BaseEstimator.__init__(self, X, Y, parameters, conveyor)
        LOG.debug('Initializing BaseEstimator parent class')
    except Exception as e:
        self.conveyor.setError(
            f'Error initializing BaseEstimator parent class with exception: {e}'
        )
        LOG.error(
            f'Error initializing BaseEstimator parent class with exception: {e}'
        )
        return

    # Load estimator parameters
    GNB_parameters = self.param.getDict('GNB_parameters')

    # Class priors as [negative, positive]; 0.0 means "not provided".
    # 'is not None' replaces the non-idiomatic '!= None' comparisons.
    priors = [0.0, 0.0]
    if GNB_parameters.get('prior_negative') is not None:
        priors[0] = GNB_parameters['prior_negative']
    if GNB_parameters.get('prior_positive') is not None:
        priors[1] = GNB_parameters['prior_positive']

    self.estimator_parameters = {}
    if GNB_parameters['var_smoothing'] is not None:
        self.estimator_parameters['var_smoothing'] = GNB_parameters[
            'var_smoothing']

    if priors[0] != 0.0 and priors[1] != 0.0:
        # Tolerance-based check: exact float equality (sum != 1.0)
        # spuriously rejected valid pairs such as 0.3 + 0.7 because of
        # binary rounding. On mismatch, keep the negative prior and
        # recompute the positive one.
        if abs(priors[0] + priors[1] - 1.0) > 1e-9:
            LOG.error(
                f'GNB: the sum of the priors should be 1. priors set to {priors[0], priors[1]} '
            )
            priors[1] = 1.0 - priors[0]
        self.estimator_parameters['priors'] = priors
        # self.param.setInnerVal('GNB_parameters','priors',priors)

    if self.param.getVal('quantitative'):
        self.conveyor.setError('GNB only applies to qualitative data')
        LOG.error('GNB only applies to qualitative data')
    else:
        self.name = "GNB-Classifier"
def __init__(self, X, Y, parameters, conveyor):
    """Set up the Random Forest wrapper (RF-R or RF-C), sanitizing the
    legacy 'max_depth' value."""
    # Initialize parent class; abort on failure
    try:
        BaseEstimator.__init__(self, X, Y, parameters, conveyor)
        LOG.debug('Initialize BaseEstimator parent class')
    except Exception as e:
        self.conveyor.setError(
            f'Error initializing BaseEstimator parent class with exception: {e}'
        )
        LOG.error(
            f'Error initializing BaseEstimator parent class with exception: {e}'
        )
        return

    # Load estimator parameters
    self.estimator_parameters = self.param.getDict('RF_parameters')

    # patch to solve bug in the object_type definition of max_depth
    # should not appear from versions older than 20/01/2021
    if 'max_depth' in self.estimator_parameters:
        v = self.estimator_parameters['max_depth']
        if v is not None:
            # Narrow the exception types: a bare 'except:' also swallowed
            # KeyboardInterrupt/SystemExit. Non-numeric values fall back
            # to None ("no depth limit").
            try:
                self.estimator_parameters['max_depth'] = int(v)
            except (TypeError, ValueError):
                self.estimator_parameters['max_depth'] = None

    # Load tune parameters
    self.tune_parameters = self.param.getDict('RF_optimize')

    if self.param.getVal('quantitative'):
        self.name = "RF-R"
        # pop() with a default avoids a KeyError when 'class_weight'
        # is absent from the parameter file
        self.tune_parameters.pop("class_weight", None)
        self.estimator_parameters.pop("class_weight", None)
    else:
        self.name = "RF-C"
def __init__(self, X, Y, parameters, conveyor):
    """Set up the Keras wrapper (regressor or classifier)."""
    # Delegate the common setup to the parent class; abort on failure
    try:
        BaseEstimator.__init__(self, X, Y, parameters, conveyor)
        LOG.debug('Initialize BaseEstimator parent class')
    except Exception as e:
        err = f'Error initializing BaseEstimator parent class with exception: {e}'
        self.conveyor.setError(err)
        LOG.error(err)
        return

    # Keras-specific settings and the search space for tuning
    self.estimator_parameters = self.param.getDict('Keras_parameters')
    self.tune_parameters = self.param.getDict('Keras_optimize')

    # Pick the wrapper name from the endpoint type
    if self.param.getVal('quantitative'):
        self.name = "Keras-Regressor"
    else:
        self.name = "Keras-Classifier"