Example #1
    def train(self, x, y):
        """
        Train one binary estimator per bit of a randomly generated
        error-correcting output code.
        Args:
            x (numpy.ndarray): input points
            y (numpy.ndarray): input labels
        """
        self.estimators = []
        self.classes = np.unique(y)
        n_classes = self.classes.shape[0]
        code_size = int(n_classes * self.code_size)
        self.codebook = self.rand.random_sample((n_classes, code_size))
        self.codebook[self.codebook > 0.5] = 1
        self.codebook[self.codebook != 1] = 0
        classes_index = dict((c, i) for i, c in enumerate(self.classes))
        # Map each sample's label to its class's code word (one column per binary estimator).
        Y = np.array(
            [self.codebook[classes_index[y[i]]] for i in range(x.shape[0])],
            dtype=int)
        logger.info("Requires {} estimators.".format(Y.shape[1]))
        for i in range(Y.shape[1]):
            y_bit = Y[:, i]
            unique_y = np.unique(y_bit)
            if len(unique_y) == 1:
                # This code bit is constant across the training set; fall back to a constant predictor.
                estimator = _ConstantPredictor()
                estimator.fit(x, unique_y)
            else:
                if self.params is None:
                    estimator = self.estimator_cls()
                else:
                    estimator = self.estimator_cls(*self.params)

                estimator.fit(x, y_bit)
            self.estimators.append(estimator)
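For context, here is a minimal standalone sketch of the same idea, assuming plain NumPy and scikit-learn's LogisticRegression as the base estimator; the toy data, the 1.5 code-size factor, and the variable names are illustrative, not taken from the original class.

import numpy as np
from sklearn.linear_model import LogisticRegression

# Toy data: three classes, two features (illustrative only).
rng = np.random.RandomState(0)
x = rng.randn(90, 2)
y = np.repeat([0, 1, 2], 30)

classes = np.unique(y)
n_classes = classes.shape[0]
code_size = int(n_classes * 1.5)  # assumed code-size factor of 1.5

# Random {0, 1} codebook: one code word per class, one column per binary estimator.
codebook = rng.random_sample((n_classes, code_size))
codebook[codebook > 0.5] = 1
codebook[codebook != 1] = 0

classes_index = {c: i for i, c in enumerate(classes)}
Y = np.array([codebook[classes_index[label]] for label in y], dtype=int)

# Train one estimator per code bit, skipping bits that are constant over the training set.
estimators = [LogisticRegression().fit(x, Y[:, i])
              for i in range(Y.shape[1]) if len(np.unique(Y[:, i])) > 1]
print("Trained {} estimators.".format(len(estimators)))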
Example #2
def _fit_binary(estimator, X, y, classes=None):
    """Fit a single binary estimator."""
    unique_y = np.unique(y)
    if len(unique_y) == 1:
        if classes is not None:
            if y[0] == -1:
                c = 0
            else:
                c = y[0]
            warnings.warn("Label %s is present in all training examples." %
                          str(classes[c]))
        estimator = _ConstantPredictor().fit(X, unique_y)
    else:
        estimator = clone(estimator)
        estimator.fit(X, y)
    return estimator
Example #3
def _fit_binary(estimator, X, y, classes=None, sample_weight=None):
    """Fit a single binary estimator."""
    unique_y = np.unique(y)
    if len(unique_y) == 1:
        if classes is not None:
            if y[0] == -1:
                c = 0
            else:
                c = y[0]
            warnings.warn("Label %s is present in all training examples." %
                          str(classes[c]))
        estimator = _ConstantPredictor().fit(X, unique_y)
    else:
        estimator = clone(estimator)
        estimator.fit(X, y, sample_weight=sample_weight)
    return estimator
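A short usage sketch for the sample_weight-aware variant above. It assumes that function is in scope, and it stands in for sklearn's private _ConstantPredictor with a minimal local class (a hypothetical stand-in, not the library's implementation):

import warnings
import numpy as np
from sklearn.base import BaseEstimator, clone
from sklearn.linear_model import LogisticRegression

class _ConstantPredictor(BaseEstimator):
    """Minimal stand-in: remembers the single label seen and always predicts it."""
    def fit(self, X, y):
        self.y_ = y
        return self

    def predict(self, X):
        return np.repeat(self.y_, X.shape[0])

X = np.array([[0.0], [1.0], [2.0], [3.0]])
y_mixed = np.array([0, 0, 1, 1])  # both labels present: the base estimator is cloned and fitted
y_const = np.array([1, 1, 1, 1])  # one label only: triggers the warning and the constant fallback

fitted = _fit_binary(LogisticRegression(), X, y_mixed, classes=[0, 1])
fallback = _fit_binary(LogisticRegression(), X, y_const, classes=[0, 1])
print(type(fitted).__name__, type(fallback).__name__)  # LogisticRegression _ConstantPredictor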
Example #4
def _fit_binary(estimator,
                x,
                y,
                classes=None,
                sample_weight=None,
                eval_set=None,
                eval_metric=None,
                early_stopping_rounds=None,
                verbose=True,
                xgb_model=None,
                sample_weight_eval_set=None,
                callbacks=None):
    """Fit a single binary estimator.
    This function is ported from sklearn multiclass module."""
    unique_y = np.unique(y)
    if len(unique_y) == 1:
        if classes is not None:
            if y[0] == -1:
                c = 0
            else:
                c = y[0]
            warnings.warn(
                f"Label {str(classes[c])} is present in all training examples."
            )
        estimator = _ConstantPredictor().fit(x, unique_y)
    else:
        estimator = clone(estimator)
        estimator.fit(x,
                      y,
                      sample_weight=sample_weight,
                      eval_set=eval_set,
                      eval_metric=eval_metric,
                      early_stopping_rounds=early_stopping_rounds,
                      verbose=verbose,
                      xgb_model=xgb_model,
                      sample_weight_eval_set=sample_weight_eval_set,
                      callbacks=callbacks)
    return estimator
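And a hedged usage sketch for the XGBoost-aware variant, assuming the function above is in scope together with an older xgboost release whose XGBClassifier.fit still accepts eval_set, eval_metric, and early_stopping_rounds as keyword arguments (recent releases moved some of these to the constructor); the data and hyperparameters are illustrative only.

import numpy as np
from xgboost import XGBClassifier

rng = np.random.RandomState(0)
X = rng.randn(200, 4)
y = (X[:, 0] > 0).astype(int)
X_tr, y_tr, X_val, y_val = X[:150], y[:150], X[150:], y[150:]

# One binary problem with early stopping against a held-out validation split.
booster = _fit_binary(XGBClassifier(n_estimators=50),
                      X_tr, y_tr,
                      classes=[0, 1],
                      eval_set=[(X_val, y_val)],
                      eval_metric="logloss",
                      early_stopping_rounds=5,
                      verbose=False)
print(type(booster).__name__)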