Example #1
    def fit(self, X, y):
        # Check the algorithm parameters
        self._check_params()

        # Check that X and y have correct shape
        X, y = check_X_y(X, y, y_numeric=False, dtype=[np.single, np.double])

        check_classification_targets(y)

        # Encode labels
        le = preprocessing.LabelEncoder()
        le.fit(y)
        self.classes_ = le.classes_
        y_ = le.transform(y)

        # Convert to 2d array
        y_ = y_.reshape((-1, 1))

        self.n_outputs_ = y_.shape[1]

        self.n_classes_ = len(self.classes_)

        self.n_features_ = X.shape[1]

        # Trivial case: the classifier cannot be trained
        # when only one class is present.
        if self.n_classes_ == 1:
            return self

        # Get random seed
        rs_ = check_random_state(self.random_state)
        seed_ = rs_.randint(0, np.iinfo('i').max)

        # Define type of data
        fptype = getFPType(X)

        # Fit the model
        train_algo = d4p.gbt_classification_training(
            fptype=fptype,
            nClasses=self.n_classes_,
            splitMethod=self.split_method,
            maxIterations=self.max_iterations,
            maxTreeDepth=self.max_tree_depth,
            shrinkage=self.shrinkage,
            minSplitLoss=self.min_split_loss,
            lambda_=self.reg_lambda,
            observationsPerTreeFraction=self.observations_per_tree_fraction,
            featuresPerNode=self.features_per_node,
            minObservationsInLeafNode=self.min_observations_in_leaf_node,
            memorySavingMode=self.memory_saving_mode,
            maxBins=self.max_bins,
            minBinSize=self.min_bin_size,
            engine=d4p.engines_mcg59(seed=seed_))
        train_result = train_algo.compute(X, y_)

        # Store the model
        self.daal_model_ = train_result.model

        # Return the classifier
        return self
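
A minimal end-to-end sketch (not part of the example above) of how the same daal4py gradient-boosted-trees classification pipeline can be driven directly. The synthetic data, the explicit parameter choices, and the use of d4p.gbt_classification_prediction for inference are assumptions here, not taken from the snippet:

    import numpy as np
    import daal4py as d4p

    # Synthetic two-class problem
    rng = np.random.RandomState(0)
    X = rng.random_sample((200, 4))
    y = rng.randint(0, 2, size=(200, 1)).astype(np.float64)

    # Train with a handful of explicit parameters; the rest keep their defaults
    train_algo = d4p.gbt_classification_training(
        fptype='double',
        nClasses=2,
        maxIterations=50,
        maxTreeDepth=6)
    train_result = train_algo.compute(X, y)

    # Predict on the training data and compare with the labels
    predict_algo = d4p.gbt_classification_prediction(fptype='double', nClasses=2)
    predictions = predict_algo.compute(X, train_result.model).prediction
    print('train accuracy:', float(np.mean(predictions == y)))
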
Example #2
    def fit(self, X, y):
        # Check the algorithm parameters
        self._check_params()

        # Check that X and y have correct shape
        X, y = check_X_y(X, y, y_numeric=True, dtype=[np.single, np.double])

        # Convert to 2d array
        y_ = y.reshape((-1, 1))

        self.n_features_ = X.shape[1]

        # Get random seed
        rs_ = check_random_state(self.random_state)
        seed_ = rs_.randint(0, np.iinfo('i').max)

        # Define type of data
        fptype = getFPType(X)

        # Fit the model
        train_algo = d4p.gbt_regression_training(
            fptype=fptype,
            splitMethod=self.split_method,
            maxIterations=self.max_iterations,
            maxTreeDepth=self.max_tree_depth,
            shrinkage=self.shrinkage,
            minSplitLoss=self.min_split_loss,
            lambda_=self.reg_lambda,
            observationsPerTreeFraction=self.observations_per_tree_fraction,
            featuresPerNode=self.features_per_node,
            minObservationsInLeafNode=self.min_observations_in_leaf_node,
            memorySavingMode=self.memory_saving_mode,
            maxBins=self.max_bins,
            minBinSize=self.min_bin_size,
            engine=d4p.engines_mcg59(seed=seed_))
        train_result = train_algo.compute(X, y_)

        # Store the model
        self.daal_model_ = train_result.model

        # Return the regressor
        return self
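
For completeness, a similar sketch for the regression path. Again the toy data and the d4p.gbt_regression_prediction call are assumptions on my part, not part of the original snippet:

    import numpy as np
    import daal4py as d4p

    # Toy regression target: a noisy linear combination of the features
    rng = np.random.RandomState(0)
    X = rng.random_sample((200, 3))
    y = (X @ np.array([1.0, -2.0, 0.5])
         + 0.01 * rng.standard_normal(200)).reshape(-1, 1)

    train_algo = d4p.gbt_regression_training(
        fptype='double',
        maxIterations=100,
        maxTreeDepth=4)
    train_result = train_algo.compute(X, y)

    # Score the fitted model on the training data
    predict_algo = d4p.gbt_regression_prediction(fptype='double')
    predictions = predict_algo.compute(X, train_result.model).prediction
    print('train RMSE:', float(np.sqrt(np.mean((predictions - y) ** 2))))
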
Example #3
    def fit(self, X, y):
        # Check the algorithm parameters
        if not (isinstance(self.n_neighbors, numbers.Integral) and
                self.n_neighbors > 0):
            raise ValueError('Parameter "n_neighbors" must be '
                             'a positive integer.')
        if not self.weights == 'uniform':
            warnings.warn('Value "{}" for argument "weights" not supported. '
                          'Using default "uniform".'.format(self.weights),
                          RuntimeWarning,
                          stacklevel=2)
            self.weights = 'uniform'
        if not self.algorithm == 'kd_tree':
            warnings.warn('Value "{}" for argument "algorithm" not supported. '
                          'Using default "kd_tree".'.format(self.algorithm),
                          RuntimeWarning,
                          stacklevel=2)
            self.algorithm = 'kd_tree'
        if not self.leaf_size == 31:
            warnings.warn('Value "{}" for argument "leaf_size" not supported. '
                          'Using default "31".'.format(self.leaf_size),
                          RuntimeWarning,
                          stacklevel=2)
            self.leaf_size = 31
        if not self.p == 2:
            warnings.warn('Value "{}" for argument "p" not supported. '
                          'Using default "2".'.format(self.p),
                          RuntimeWarning,
                          stacklevel=2)
            self.p = 2
        if not self.metric == 'minkowski':
            warnings.warn('Value "{}" for argument "metric" not supported. '
                          'Using default "minkowski".'.format(self.metric),
                          RuntimeWarning,
                          stacklevel=2)
            self.metric = 'minkowski'
        if self.metric_params is not None:
            warnings.warn(
                'Argument "metric_params" not (yet) supported. '
                'Ignored.',
                RuntimeWarning,
                stacklevel=2)
            self.metric_params = None
        if self.n_jobs is not None:
            warnings.warn(
                'Argument "n_jobs" not (yet) supported. '
                'Ignored. All available processors will be used.',
                RuntimeWarning,
                stacklevel=2)
            self.n_jobs = None

        # Check that X and y have correct shape
        X, y = check_X_y(X, y, y_numeric=False, dtype=[np.single, np.double])

        check_classification_targets(y)

        # Encode labels
        le = preprocessing.LabelEncoder()
        le.fit(y)
        self.classes_ = le.classes_
        y_ = le.transform(y)

        # Convert to 2d array
        y_ = y_.reshape((-1, 1))

        self.n_classes_ = len(self.classes_)

        self.n_features_ = X.shape[1]

        # Trivial case: the classifier cannot be trained
        # when only one class is present.
        if self.n_classes_ == 1:
            return self

        # Get random seed
        rs = check_random_state(None)
        self.seed_ = rs.randint(np.iinfo('i').max)

        # Define type of data
        fptype = getFPType(X)

        # Fit the model
        train_algo = d4p.kdtree_knn_classification_training(
            fptype=fptype, engine=d4p.engines_mcg59(seed=self.seed_))
        train_result = train_algo.compute(X, y_)

        # Store the model
        self.daal_model_ = train_result.model

        # Return the classifier
        return self
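
As with the examples above, here is a small usage sketch (my assumption, not from the source) showing how a KD-tree kNN model trained through daal4py could be used for prediction via d4p.kdtree_knn_classification_prediction; the toy blobs and the prediction-side parameters are illustrative only:

    import numpy as np
    import daal4py as d4p

    # Two well-separated Gaussian blobs as a toy classification task
    rng = np.random.RandomState(0)
    X = np.vstack([rng.standard_normal((100, 2)) - 3.0,
                   rng.standard_normal((100, 2)) + 3.0])
    y = np.vstack([np.zeros((100, 1)), np.ones((100, 1))])

    train_algo = d4p.kdtree_knn_classification_training(
        fptype='double', engine=d4p.engines_mcg59(seed=777))
    train_result = train_algo.compute(X, y)

    # With default parameters this predicts using the nearest neighbor only
    predict_algo = d4p.kdtree_knn_classification_prediction(fptype='double')
    predictions = predict_algo.compute(X, train_result.model).prediction
    print('train accuracy:', float(np.mean(predictions == y)))
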