def sample_type(self, sample_type):
    """Validate and store the ``sample_type`` parameter (one of: uniform, weighted)."""
    allowed = Enum("uniform", "weighted")
    assert_is_type(sample_type, None, allowed)
    self._parms["sample_type"] = sample_type
def histogram_type(self, histogram_type):
    """Validate and store the ``histogram_type`` parameter.

    Accepted values: auto, uniform_adaptive, random, quantiles_global, round_robin.
    """
    choices = Enum("auto", "uniform_adaptive", "random", "quantiles_global", "round_robin")
    assert_is_type(histogram_type, None, choices)
    self._parms["histogram_type"] = histogram_type
def metalearner_algorithm(self, metalearner_algorithm):
    """Validate and store the ``metalearner_algorithm`` parameter.

    Accepted values: auto, deeplearning, drf, gbm, glm, naivebayes, xgboost.
    """
    allowed = Enum("auto", "deeplearning", "drf", "gbm", "glm", "naivebayes", "xgboost")
    assert_is_type(metalearner_algorithm, None, allowed)
    self._parms["metalearner_algorithm"] = metalearner_algorithm
def kernel_type(self, kernel_type):
    """Validate and store the ``kernel_type`` parameter (only "gaussian" is accepted)."""
    assert_is_type(kernel_type, None, Enum("gaussian"))
    self._parms["kernel_type"] = kernel_type
def fold_assignment(self, fold_assignment):
    """Validate and store the ``fold_assignment`` parameter.

    Accepted values: auto, random, modulo, stratified.
    """
    schemes = Enum("auto", "random", "modulo", "stratified")
    assert_is_type(fold_assignment, None, schemes)
    self._parms["fold_assignment"] = fold_assignment
def loss(self, loss):
    """Validate and store the ``loss`` parameter.

    Accepted values: automatic, cross_entropy, quadratic, huber, absolute, quantile.
    """
    valid_losses = Enum("automatic", "cross_entropy", "quadratic", "huber", "absolute", "quantile")
    assert_is_type(loss, None, valid_losses)
    self._parms["loss"] = loss
def ties(self, ties):
    """Validate and store the ``ties`` parameter (one of: efron, breslow)."""
    assert_is_type(ties, None, Enum("efron", "breslow"))
    self._parms["ties"] = ties
def backend(self, backend):
    """Validate and store the ``backend`` parameter (one of: auto, gpu, cpu)."""
    supported = Enum("auto", "gpu", "cpu")
    assert_is_type(backend, None, supported)
    self._parms["backend"] = backend
def loss(self, loss):
    """Validate and store the ``loss`` parameter.

    Accepted values: quadratic, absolute, huber, poisson, hinge, logistic, periodic.
    """
    allowed = Enum("quadratic", "absolute", "huber", "poisson", "hinge", "logistic", "periodic")
    assert_is_type(loss, None, allowed)
    self._parms["loss"] = loss
def booster(self, booster):
    """Validate and store the ``booster`` parameter (one of: gbtree, gblinear, dart)."""
    boosters = Enum("gbtree", "gblinear", "dart")
    assert_is_type(booster, None, boosters)
    self._parms["booster"] = booster
def dmatrix_type(self, dmatrix_type):
    """Validate and store the ``dmatrix_type`` parameter (one of: auto, dense, sparse)."""
    assert_is_type(dmatrix_type, None, Enum("auto", "dense", "sparse"))
    self._parms["dmatrix_type"] = dmatrix_type
def grow_policy(self, grow_policy):
    """Validate and store the ``grow_policy`` parameter (one of: depthwise, lossguide)."""
    policies = Enum("depthwise", "lossguide")
    assert_is_type(grow_policy, None, policies)
    self._parms["grow_policy"] = grow_policy
def tree_method(self, tree_method):
    """Validate and store the ``tree_method`` parameter (one of: auto, exact, approx, hist)."""
    methods = Enum("auto", "exact", "approx", "hist")
    assert_is_type(tree_method, None, methods)
    self._parms["tree_method"] = tree_method
def normalize_type(self, normalize_type):
    """Validate and store the ``normalize_type`` parameter (one of: tree, forest)."""
    assert_is_type(normalize_type, None, Enum("tree", "forest"))
    self._parms["normalize_type"] = normalize_type
def activation(self, activation):
    """Validate and store the ``activation`` parameter.

    Accepted values: tanh, tanh_with_dropout, rectifier, rectifier_with_dropout,
    maxout, maxout_with_dropout.
    """
    functions = Enum("tanh", "tanh_with_dropout", "rectifier", "rectifier_with_dropout",
                     "maxout", "maxout_with_dropout")
    assert_is_type(activation, None, functions)
    self._parms["activation"] = activation
def loss_by_col(self, loss_by_col):
    """Validate and store the ``loss_by_col`` parameter.

    Expects ``None`` or a list whose entries are each one of: quadratic, absolute,
    huber, poisson, hinge, logistic, periodic, categorical, ordinal.
    """
    per_column = Enum("quadratic", "absolute", "huber", "poisson", "hinge", "logistic",
                      "periodic", "categorical", "ordinal")
    assert_is_type(loss_by_col, None, [per_column])
    self._parms["loss_by_col"] = loss_by_col
def initial_weight_distribution(self, initial_weight_distribution):
    """Validate and store the ``initial_weight_distribution`` parameter.

    Accepted values: uniform_adaptive, uniform, normal.
    """
    distributions = Enum("uniform_adaptive", "uniform", "normal")
    assert_is_type(initial_weight_distribution, None, distributions)
    self._parms["initial_weight_distribution"] = initial_weight_distribution
def multi_loss(self, multi_loss):
    """Validate and store the ``multi_loss`` parameter (one of: categorical, ordinal)."""
    assert_is_type(multi_loss, None, Enum("categorical", "ordinal"))
    self._parms["multi_loss"] = multi_loss
def data_leakage_handling(self, data_leakage_handling):
    """Validate and store the ``data_leakage_handling`` parameter.

    Accepted values: none, k_fold, leave_one_out.
    """
    strategies = Enum("none", "k_fold", "leave_one_out")
    assert_is_type(data_leakage_handling, None, strategies)
    self._parms["data_leakage_handling"] = data_leakage_handling
def regularization_y(self, regularization_y):
    """Validate and store the ``regularization_y`` parameter.

    Accepted values: none, quadratic, l2, l1, non_negative, one_sparse,
    unit_one_sparse, simplex.
    """
    regularizers = Enum("none", "quadratic", "l2", "l1", "non_negative", "one_sparse",
                        "unit_one_sparse", "simplex")
    assert_is_type(regularization_y, None, regularizers)
    self._parms["regularization_y"] = regularization_y
def stopping_metric(self, stopping_metric):
    """Validate and store the ``stopping_metric`` parameter (one of: auto, anomaly_score)."""
    metrics = Enum("auto", "anomaly_score")
    assert_is_type(stopping_metric, None, metrics)
    self._parms["stopping_metric"] = stopping_metric
def init(self, init):
    """Validate and store the ``init`` parameter (one of: random, svd, plus_plus, user)."""
    initializers = Enum("random", "svd", "plus_plus", "user")
    assert_is_type(init, None, initializers)
    self._parms["init"] = init
def transform(self, transform):
    """Validate and store the ``transform`` parameter.

    Accepted values: none, standardize, normalize, demean, descale.
    """
    transforms = Enum("none", "standardize", "normalize", "demean", "descale")
    assert_is_type(transform, None, transforms)
    self._parms["transform"] = transform
def svd_method(self, svd_method):
    """Validate and store the ``svd_method`` parameter (one of: gram_s_v_d, power, randomized)."""
    methods = Enum("gram_s_v_d", "power", "randomized")
    assert_is_type(svd_method, None, methods)
    self._parms["svd_method"] = svd_method
def distribution(self, distribution):
    """Validate and store the ``distribution`` parameter.

    Accepted values: auto, bernoulli, multinomial, gaussian, poisson, gamma,
    tweedie, laplace, quantile, huber.
    """
    families = Enum("auto", "bernoulli", "multinomial", "gaussian", "poisson", "gamma",
                    "tweedie", "laplace", "quantile", "huber")
    assert_is_type(distribution, None, families)
    self._parms["distribution"] = distribution
def score_validation_sampling(self, score_validation_sampling):
    """Validate and store the ``score_validation_sampling`` parameter (one of: uniform, stratified)."""
    samplings = Enum("uniform", "stratified")
    assert_is_type(score_validation_sampling, None, samplings)
    self._parms["score_validation_sampling"] = score_validation_sampling
def categorical_encoding(self, categorical_encoding):
    """Validate and store the ``categorical_encoding`` parameter.

    Accepted values: auto, enum, one_hot_internal, one_hot_explicit, binary,
    eigen, label_encoder, sort_by_response.
    """
    encodings = Enum("auto", "enum", "one_hot_internal", "one_hot_explicit", "binary",
                     "eigen", "label_encoder", "sort_by_response")
    assert_is_type(categorical_encoding, None, encodings)
    self._parms["categorical_encoding"] = categorical_encoding
def missing_values_handling(self, missing_values_handling):
    """Validate and store the ``missing_values_handling`` parameter (one of: mean_imputation, skip)."""
    handlers = Enum("mean_imputation", "skip")
    assert_is_type(missing_values_handling, None, handlers)
    self._parms["missing_values_handling"] = missing_values_handling
def metalearner_fold_assignment(self, metalearner_fold_assignment):
    """Validate and store the ``metalearner_fold_assignment`` parameter.

    Accepted values: auto, random, modulo, stratified.
    """
    schemes = Enum("auto", "random", "modulo", "stratified")
    assert_is_type(metalearner_fold_assignment, None, schemes)
    self._parms["metalearner_fold_assignment"] = metalearner_fold_assignment
def setDistribution(self, value):
    """Validate the distribution name and set it as the backend JVM ``DistributionFamily``.

    Accepted values: AUTO, bernoulli, multinomial, gaussian, poisson, gamma,
    tweedie, laplace, quantile, huber. Returns the result of ``self._set``.
    """
    families = Enum("AUTO", "bernoulli", "multinomial", "gaussian", "poisson", "gamma",
                    "tweedie", "laplace", "quantile", "huber")
    assert_is_type(value, None, families)
    # Resolve the JVM gateway through the active Spark session, then translate
    # the string name into the corresponding DistributionFamily enum constant.
    session = SparkSession.builder.getOrCreate()
    gateway = H2OContext.getOrCreate(session, verbose=False)._jvm
    family = gateway.hex.genmodel.utils.DistributionFamily.valueOf(value)
    return self._set(distribution=family)