# Example 1: Gaussian naive Bayes model with optional ensemble wrapper
    def __init__(self, imbalance_upsampling=None, class_weight=None, method=None, random_state=1, log=None):
        """
        Build the Gaussian naive Bayes model, optionally wrapped in an ensemble.

        :param imbalance_upsampling:    Use upsampling to compensate imbalanced dataset
        :param class_weight:            Use class_weight to compensate imbalanced dataset
        :param method:                  [Optional] Ensemble method ("Bagging" or "Adaptive Boosting")
        :param random_state:            Random state for the ensemble wrapper
        :param log:                     Log
        """
        MlModelCommon.__init__(self,
                               imbalance_upsampling=imbalance_upsampling,
                               class_weight=class_weight,
                               method=method,
                               log=log)
        # GaussianNB itself does not accept class_weight, so imbalance can
        # only be compensated via upsampling or the ensemble wrappers.
        ensemble_for = {
            "Bagging": BaggingClassifier,
            "Adaptive Boosting": AdaBoostClassifier,
        }
        wrapper = ensemble_for.get(method)
        if wrapper is None:
            # No ensemble: initialize this instance as a plain GaussianNB.
            self.ensemble_method = None
            GaussianNB.__init__(self)
        else:
            self.ensemble_method = wrapper(base_estimator=GaussianNB(),
                                           n_estimators=100,
                                           random_state=random_state)
# Example 2: decision-tree model with optional ensemble wrapper
    def __init__(self,
                 imbalance_upsampling=None,
                 class_weight=None,
                 method=None,
                 random_state=10,
                 log=None):
        """
        Build the decision-tree model, optionally wrapped in an ensemble.

        :param imbalance_upsampling:    Use upsampling to compensate imbalanced dataset
        :param class_weight:            Use class_weight to compensate imbalanced dataset
        :param method:                  [Optional] Ensemble method ("Bagging" or "Adaptive Boosting")
        :param random_state:            Random state for the ensemble wrapper
        :param log:                     Log
        """
        MlModelCommon.__init__(self,
                               imbalance_upsampling=imbalance_upsampling,
                               class_weight=class_weight,
                               method=method,
                               log=log)

        # NOTE(review): the base tree uses a fixed random_state=99 while the
        # ensemble uses the caller-supplied random_state — presumably
        # intentional, so the tree structure stays reproducible; confirm.
        if method == "Bagging":
            model = DecisionTreeClassifier(class_weight=class_weight,
                                           min_samples_split=20,
                                           random_state=99)
            self.ensemble_method = BaggingClassifier(base_estimator=model,
                                                     n_estimators=10,
                                                     random_state=random_state)
        elif method == "Adaptive Boosting":
            model = DecisionTreeClassifier(class_weight=class_weight,
                                           min_samples_split=20,
                                           random_state=99)
            self.ensemble_method = AdaBoostClassifier(
                base_estimator=model,
                n_estimators=50,
                random_state=random_state)
        else:
            # No ensemble: initialize this instance as a plain decision tree.
            self.ensemble_method = None
            DecisionTreeClassifier.__init__(self,
                                            class_weight=class_weight,
                                            min_samples_split=20,
                                            random_state=99)
# Example 3: Bernoulli naive Bayes model with optional ensemble wrapper
    def __init__(self,
                 imbalance_upsampling=None,
                 class_weight=None,
                 method=None,
                 c=100.0,
                 random_state=1,
                 log=None):
        """
        Build the Bernoulli naive Bayes model, optionally wrapped in an ensemble.

        :param imbalance_upsampling:    Use upsampling to compensate imbalanced dataset
        :param class_weight:            Use class_weight to compensate imbalanced dataset
        :param method:                  [Optional] Ensemble method ("Bagging" or "Adaptive Boosting")
        :param c:                       Currently unused by this constructor —
                                        kept for signature parity with the
                                        sibling model classes
        :param random_state:            Random state for the ensemble wrapper
        :param log:                     Log
        """
        MlModelCommon.__init__(self,
                               imbalance_upsampling=imbalance_upsampling,
                               class_weight=class_weight,
                               method=method,
                               log=log)

        if method == "Bagging":
            model = BernoulliNB()
            self.ensemble_method = BaggingClassifier(base_estimator=model,
                                                     n_estimators=10,
                                                     random_state=random_state)
        elif method == "Adaptive Boosting":
            model = BernoulliNB()
            self.ensemble_method = AdaBoostClassifier(
                base_estimator=model,
                n_estimators=10,
                random_state=random_state)
        else:
            #
            # BernoulliNB does not support class_weight
            #
            BernoulliNB.__init__(self)
            self.ensemble_method = None
    def __init__(self, strategy="stratified", log=None):
        """
        Build a baseline dummy classifier.

        :param self:
        :param strategy: Possible choices
                "stratified": Predict by respecting the training set's class distribution
                "most_frequent": Always predict the majority
                "prior": Maximize the class prior
                "uniform": Generates uniform prediction randomly
                "constant": Predict a given value always
        :param log: Log
        :return: None
        """
        MlModelCommon.__init__(self, log=log)
        DummyClassifier.__init__(self, strategy=strategy)
# Example 5: logistic-regression model with optional ensemble wrapper
    def __init__(self,
                 imbalance_upsampling=None,
                 class_weight=None,
                 method=None,
                 c=100.0,
                 random_state=1,
                 log=None):
        """
        Initialize the model
        :param imbalance_upsampling:    Using upsampling to compensate imbalanced dataset
        :param class_weight:            It can be None, "balanced", or a dict. Used for imbalance class
        :param method:                  Optional ensemble method ("Bagging" or "Adaptive Boosting")
        :param c:                       Inverse regularization strength passed to the base estimator
        :param random_state:            Random state
        :param log:                     log
        """
        self.c = c
        self.random_state = random_state
        MlModelCommon.__init__(self,
                               imbalance_upsampling=imbalance_upsampling,
                               class_weight=class_weight,
                               method=method,
                               log=log)

        if method in ("Bagging", "Adaptive Boosting"):
            # Both ensembles share the same logistic-regression base model.
            base = LgRegression(C=c,
                                class_weight=class_weight,
                                random_state=random_state)
            if method == "Bagging":
                self.ensemble_method = BaggingClassifier(
                    base_estimator=base,
                    n_estimators=200,
                    random_state=random_state)
            else:
                self.ensemble_method = AdaBoostClassifier(
                    base_estimator=base,
                    n_estimators=200,
                    random_state=random_state)
        else:
            # No ensemble: initialize this instance as a plain logistic regression.
            self.ensemble_method = None
            LgRegression.__init__(self,
                                  C=c,
                                  random_state=random_state,
                                  class_weight=class_weight)
    def __init__(self,
                 imbalance_upsampling=None,
                 class_weight=None,
                 random_state=1,
                 n_neighbors=5,
                 method=None,
                 log=None):
        """
        Build the k-nearest-neighbors model, optionally wrapped in an ensemble.

        :param imbalance_upsampling:    Use upsampling to compensate imbalance
        :param class_weight:            Use class_weight to compensate imbalance
        :param random_state:            Random state for the ensemble wrapper
        :param n_neighbors:             Number of neighbor samples to use
        :param method:                  Ensemble method ("Bagging" or "Adaptive Boosting")
        :param log:                     Log
        """
        MlModelCommon.__init__(self,
                               imbalance_upsampling=imbalance_upsampling,
                               class_weight=class_weight,
                               method=method,
                               log=log)

        # KNeighborsClassifier has no class_weight parameter, so imbalance
        # is handled elsewhere (e.g. upsampling).
        def make_knn():
            return KNeighborsClassifier(n_neighbors=n_neighbors,
                                        metric="minkowski")

        if method == "Bagging":
            self.ensemble_method = BaggingClassifier(
                base_estimator=make_knn(),
                n_estimators=10,
                random_state=random_state)
        elif method == "Adaptive Boosting":
            self.ensemble_method = AdaBoostClassifier(
                base_estimator=make_knn(),
                n_estimators=10,
                random_state=random_state)
        else:
            # No ensemble: initialize this instance as a plain KNN classifier.
            self.ensemble_method = None
            KNeighborsClassifier.__init__(self,
                                          n_neighbors=n_neighbors,
                                          metric="minkowski")
 def __init__(self,
              imbalance_upsampling=None,
              class_weight=None,
              method=None,
              log=None):
     """
     Build the random-forest model (no extra ensemble wrapper is offered).

     :param imbalance_upsampling:    Use upsampling to compensate imbalanced dataset
     :param class_weight:            Use class_weight to compensate imbalanced dataset
     :param method:                  Accepted for signature parity with the
                                     sibling models but ignored here
     :param log:                     Log
     """
     MlModelCommon.__init__(self,
                            imbalance_upsampling=imbalance_upsampling,
                            class_weight=class_weight,
                            method=method,
                            log=log)
     #
     # Random forest is a special case of bagging of
     # decision tree. Might not make sense to
     # add ensemble method.
     #
     self.ensemble_method = None
     RandomForestClassifier.__init__(self,
                                     class_weight=class_weight,
                                     n_estimators=100,
                                     random_state=99)