def __init__(self, n_neighbors=5, weights='uniform', metric="dtw", metric_params=None):
    """k-NN classifier configured for a custom (e.g. DTW) distance.

    :param n_neighbors: number of neighbours consulted per query.
    :param weights: neighbour weighting scheme forwarded to the base class.
    :param metric: name of the distance measure, stored on the instance.
    :param metric_params: optional dict of extra metric keyword arguments.
    """
    # Brute force is mandatory: tree-based indexes cannot use the custom metric.
    KNeighborsClassifier.__init__(self, n_neighbors=n_neighbors, weights=weights, algorithm='brute')
    # Keep the metric configuration as plain attributes (sklearn clone contract).
    self.metric_params = metric_params
    self.metric = metric
def __init__(self):
    """k-NN preset: 5 distance-weighted neighbours, brute-force search.

    Takes no arguments; every hyper-parameter is fixed here.
    """
    KNeighborsClassifier.__init__(
        self,
        n_neighbors=5,
        weights='distance',
        algorithm='brute',
        leaf_size=40,  # BUG FIX: was the string '40'; sklearn expects an int
        p=2,
        metric='minkowski',
        metric_params=None,
        n_jobs=1,
    )
    # Display name; name-mangled to _<ClassName>__name on the instance.
    self.__name = 'k-NN'
def __init__(self, n_neighbors=5, weights='uniform', metric='dtw', metric_params=None, n_jobs=None, verbose=0):
    """Brute-force k-NN with a configurable distance and parallelism knobs.

    :param n_neighbors: number of neighbours consulted per query.
    :param weights: neighbour weighting scheme forwarded to the base class.
    :param metric: name of the distance measure, stored on the instance.
    :param metric_params: optional dict of extra metric keyword arguments.
    :param n_jobs: parallelism setting kept on the instance.
    :param verbose: verbosity level kept on the instance.
    """
    # The custom metric forces a brute-force neighbour search.
    KNeighborsClassifier.__init__(self, n_neighbors=n_neighbors, weights=weights, algorithm='brute')
    # Remaining settings become plain attributes (sklearn clone contract).
    self.verbose = verbose
    self.n_jobs = n_jobs
    self.metric_params = metric_params
    self.metric = metric
def __init__(self, n_neighbors=5, weights='uniform', algorithm='auto', leaf_size=30, p=2, metric='minkowski', metric_params=None, n_jobs=1, **kwargs):
    """Wrapped KNeighborsClassifier that tolerates string-valued counts.

    ``n_neighbors`` and ``leaf_size`` are coerced to ``int`` so values
    coming from e.g. a text config file still work; everything else is
    forwarded unchanged to the underlying estimator.
    """
    _KNeighborsClassifier.__init__(
        self,
        int(n_neighbors),
        weights,
        algorithm,
        int(leaf_size),
        p,
        metric,
        metric_params,
        n_jobs,
        **kwargs,
    )
    # Hook in the wrapper machinery after the estimator is configured.
    BaseWrapperClf.__init__(self)
def __init__(self, imbalance_upsampling=None, class_weight=None, random_state=1, n_neighbors=5, method=None, log=None):
    """k-NN model wrapper with optional bagging/boosting ensembling.

    :param imbalance_upsampling: Use upsampling to compensate imbalance
    :param class_weight: Use class_weight to compensate imbalance
    :param random_state: Random state
    :param n_neighbors: Number of neighbor samples to use
    :param method: Ensemble method
    :param log: Log
    """
    MlModelCommon.__init__(self,
                           imbalance_upsampling=imbalance_upsampling,
                           class_weight=class_weight,
                           method=method,
                           log=log)
    # k-NN itself does not support class_weight, so only the ensemble
    # selection is handled here.
    if method in ("Bagging", "Adaptive Boosting"):
        base = KNeighborsClassifier(n_neighbors=n_neighbors, metric="minkowski")
        wrapper = BaggingClassifier if method == "Bagging" else AdaBoostClassifier
        self.ensemble_method = wrapper(base_estimator=base,
                                       n_estimators=10,
                                       random_state=random_state)
    else:
        self.ensemble_method = None
    # The instance is always a usable plain k-NN estimator as well.
    KNeighborsClassifier.__init__(self, n_neighbors=n_neighbors, metric="minkowski")
def __init__(self, k='all'):
    """Cosine-distance k-NN (7 distance-weighted neighbours, brute force)
    with a feature-count setting.

    :param k: number of features to keep (or ``'all'``). NOTE(review):
        stored on the CLASS, so it is shared by every instance — confirm
        that is intended.
    """
    # BUG FIX: compare against None with identity ('k != None' violates
    # PEP 8 and misbehaves for objects whose __eq__ matches None).
    if k is not None:
        MutableKNeighborsClassifier.K_features = k
    KNeighborsClassifier.__init__(self, n_neighbors=7, metric='cosine', algorithm='brute', weights='distance')
def __init__(self, data=None, nb_class=2, labels=None, k_neighbors=8):
    """k-NN classifier that also carries its dataset and label metadata.

    :param data: training samples kept on the instance (not fitted here).
    :param nb_class: number of target classes, stored for later use.
    :param labels: training labels kept on the instance.
    :param k_neighbors: neighbour count forwarded as ``n_neighbors``.
    """
    KNeighborsClassifier.__init__(self, n_neighbors=k_neighbors)
    # Stash the constructor arguments; the assignments are independent.
    self.k_neighbors = k_neighbors
    self.nb_class = nb_class
    self.labels = labels
    self.data = data
def __init__(self, pmml, n_jobs=None):
    """Build a k-NN classifier from a PMML document and fit it immediately.

    :param pmml: PMML model description handed to PMMLBaseClassifier.
    :param n_jobs: parallelism setting forwarded to KNeighborsClassifier.
    """
    # Statement order is load-bearing: the final fit() reads self._X and
    # self._y, which are presumably populated by the PMML base
    # initializers below — verify against PMMLBaseClassifier/PMMLBaseKNN.
    PMMLBaseClassifier.__init__(self, pmml)
    KNeighborsClassifier.__init__(self, n_jobs=n_jobs)
    PMMLBaseKNN.__init__(self)
    # Fit eagerly so the estimator can predict right after construction.
    KNeighborsClassifier.fit(self, self._X, self._y)
def __init__(self, n_neighbors=1, weights='uniform', metric="dtw", metric_params=None, variables_size=1, multivariate_output=None):
    """Brute-force k-NN for (possibly multivariate) series data.

    :param n_neighbors: number of neighbours consulted per query.
    :param weights: neighbour weighting scheme forwarded to the base class.
    :param metric: name of the distance measure, stored on the instance.
    :param metric_params: optional dict of extra metric keyword arguments.
    :param variables_size: number of variables per sample, kept as-is.
    :param multivariate_output: multivariate-output setting, kept as-is.
    """
    # Tree indexes cannot handle the custom metric, hence brute force.
    KNeighborsClassifier.__init__(self, n_neighbors=n_neighbors, weights=weights, algorithm='brute')
    # Record the remaining configuration as plain attributes.
    self.multivariate_output = multivariate_output
    self.variables_size = variables_size
    self.metric_params = metric_params
    self.metric = metric
def __init__(self, k=5):
    """Thin k-NN wrapper: remember the neighbour count, then delegate.

    :param k: number of neighbours, passed through as ``n_neighbors``.
    """
    self.k = k
    KNeighborsClassifier.__init__(self, n_neighbors=self.k)
def __init__(self, threshold=1, ll_ranking=False, **kwargs):
    """Wire the kN estimator together with the BaseClassifier mixin.

    :param threshold: decision threshold handed to BaseClassifier.
    :param ll_ranking: ranking flag handed to BaseClassifier.
    :param kwargs: forwarded unchanged to the kN initialiser.
    """
    # Initialise the estimator first, then the classifier mixin on top.
    kN.__init__(self, **kwargs)
    BaseClassifier.__init__(self, threshold=threshold, ll_ranking=ll_ranking)