Example #1
    def __init__(self, options):
        self.handle_options(options)
        params = options.get('params', {})
        out_params = convert_params(
            params,
            strs=['loss', 'max_features'],
            floats=['learning_rate', 'min_weight_fraction_leaf'],
            ints=[
                'n_estimators', 'max_depth', 'min_samples_split',
                'min_samples_leaf', 'max_leaf_nodes', 'random_state'
            ],
        )

        valid_loss = ['deviance', 'exponential']
        if 'loss' in out_params:
            if out_params['loss'] not in valid_loss:
                msg = "loss must be one of: {}".format(', '.join(valid_loss))
                raise RuntimeError(msg)

        if 'max_features' in out_params:
            out_params['max_features'] = handle_max_features(
                out_params['max_features'])

        # scikit-learn ignores max_depth once max_leaf_nodes is set, so
        # tell the user instead of silently dropping their setting.
        if 'max_leaf_nodes' in out_params and 'max_depth' in out_params:
            messages.warn('max_depth ignored when max_leaf_nodes is set')

        self.estimator = _GradientBoostingClassifier(**out_params)
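
All three examples funnel raw option strings through a convert_params helper that is not shown on this page. The sketch below is only a guess at its contract, inferred from the call sites above: cast each listed parameter to the requested type and reject anything unrecognized. The real helper's behavior may differ.

    def convert_params(params, strs=(), floats=(), ints=(), bools=()):
        """Hypothetical sketch: cast raw string option values to the
        declared types, rejecting unknown keys and bad values."""
        casts = {}
        casts.update({name: str for name in strs})
        casts.update({name: float for name in floats})
        casts.update({name: int for name in ints})
        casts.update({name: lambda v: str(v).lower() in ('1', 'true', 'yes')
                      for name in bools})
        out = {}
        for key, value in params.items():
            if key not in casts:
                raise RuntimeError('Unexpected parameter: {}'.format(key))
            try:
                out[key] = casts[key](value)
            except ValueError:
                raise RuntimeError(
                    'Invalid value for {}: "{}"'.format(key, value))
        return out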
Example #2
    def __init__(self, options):
        self.handle_options(options)

        out_params = convert_params(
            options.get('params', {}),
            ints=['random_state', 'n_estimators', 'max_depth',
                  'min_samples_split', 'max_leaf_nodes'],
            strs=['max_features', 'criterion'],
        )

        # If the caller did not bound tree depth, cap growth by leaf count
        # instead (unless max_leaf_nodes was set explicitly).
        if 'max_depth' not in out_params:
            out_params.setdefault('max_leaf_nodes', 2000)

        if 'max_features' in out_params:
            out_params['max_features'] = handle_max_features(out_params['max_features'])

        # 'balanced' reweights classes inversely to their frequency in the
        # training data; all remaining settings come from the user's options.
        self.estimator = _RandomForestClassifier(class_weight='balanced',
                                                 **out_params)
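
The examples also delegate to a handle_max_features helper, likewise absent from this page. A plausible sketch, assuming it passes through the strategy names scikit-learn accepts and otherwise coerces the value to a number (a whole number meaning an absolute feature count, a fraction meaning a ratio of features):

    def handle_max_features(max_features):
        """Hypothetical sketch: normalize the max_features option."""
        if max_features in ('sqrt', 'log2', 'auto'):
            return max_features  # named strategies scikit-learn understands
        try:
            value = float(max_features)
        except ValueError:
            raise RuntimeError(
                'Invalid value for max_features: "{}"'.format(max_features))
        # Whole numbers mean an absolute count, fractions a ratio.
        return int(value) if value.is_integer() else value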
Example #3
    def __init__(self, options):
        self.handle_options(options)
        params = options.get('params', {})
        out_params = convert_params(
            params,
            strs=['loss', 'max_features'],
            floats=['learning_rate', 'min_weight_fraction_leaf', 'alpha', 'subsample'],
            ints=['n_estimators', 'max_depth', 'min_samples_split',
                  'min_samples_leaf', 'max_leaf_nodes', 'random_state'],
        )

        valid_loss = ['ls', 'lad', 'huber', 'quantile']
        if 'loss' in out_params:
            if out_params['loss'] not in valid_loss:
                msg = "loss must be one of: {}".format(', '.join(valid_loss))
                raise RuntimeError(msg)

        if 'max_features' in out_params:
            out_params['max_features'] = handle_max_features(out_params['max_features'])

        self.estimator = _GradientBoostingRegressor(**out_params)
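
To make the data flow concrete, here is a hedged, self-contained usage sketch for the regressor path. It reuses the convert_params sketch from above; the import alias mirrors the examples, and 'huber' is picked from Example #3's list of valid losses. Every raw value arrives as a string, exactly as the examples assume:

    from sklearn.ensemble import GradientBoostingRegressor as _GradientBoostingRegressor

    # Options as a calling command might deliver them: every value a string.
    raw_params = {'loss': 'huber', 'learning_rate': '0.05',
                  'n_estimators': '200', 'max_features': 'sqrt'}
    out_params = convert_params(
        raw_params,
        strs=['loss', 'max_features'],
        floats=['learning_rate'],
        ints=['n_estimators'],
    )
    estimator = _GradientBoostingRegressor(**out_params)
    print(type(estimator.learning_rate))  # <class 'float'>, no longer a string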