def get_space(cache):
    space = HyperSpace()
    with space.as_default():
        name_prefix = 'test_'
        filters = 64
        in1 = Input(shape=(28, 28, 1))
        in2 = Input(shape=(28, 28, 1))
        # Pick exactly one of the two inputs, then one of five candidate ops.
        ic1 = InputChoice([in1, in2], 1)([in1, in2])
        or1 = ModuleChoice([
            sepconv5x5(name_prefix, filters),
            sepconv3x3(name_prefix, filters),
            avgpooling3x3(name_prefix, filters),
            maxpooling3x3(name_prefix, filters),
            identity(name_prefix)
        ])(ic1)
        space.set_inputs([in1, in2])
        space.weights_cache = cache
        return space
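
A space built this way is normally handed to a searcher, but it can also be sampled directly for a quick smoke test. A minimal sketch, assuming Hypernets' HyperSpace exposes random_sample() and all_assigned as in its test suite (verify against your version):

space = get_space(cache=None)   # assumption: no weights cache needed here
space.random_sample()           # assign a random value to every choice
print(space.all_assigned)       # True once every hyperparameter is set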
Example #2
def get_space():
    space = HyperSpace()
    with space.as_default():
        filters = 64
        in1 = Input(shape=(28, 28, 1))
        # hp_dict and conv_layer are defined at module level in the
        # original source; the call registers its modules on the space.
        conv_layer(hp_dict, 'normal', 0, [in1, in1], filters, 5)
        space.set_inputs(in1)
        return space
Example #3
def get_space():
    space = HyperSpace()
    with space.as_default():
        filters = 64
        in1 = Input(shape=(28, 28, 1), dtype='float32')
        # hp_dict and conv_node come from the enclosing module.
        conv_node(hp_dict, 'normal', 0, 0, [in1, in1], filters)
        space.set_inputs(in1)
        return space
Example #4
def enas_micro_search_space(arch='NRNR',
                            input_shape=(28, 28, 1),
                            init_filters=64,
                            node_num=4,
                            data_format=None,
                            classes=10,
                            classification_dropout=0,
                            hp_dict=None,
                            use_input_placeholder=True,
                            weights_cache=None):
    # hp_dict=None avoids a shared mutable default; create a dict per call.
    if hp_dict is None:
        hp_dict = {}
    space = HyperSpace()
    with space.as_default():
        if use_input_placeholder:
            input = Input(shape=input_shape, name='0_input')
        else:
            input = None
        stem, input = stem_op(input, init_filters, data_format)
        node0 = stem
        node1 = stem
        reduction_no = 0
        normal_no = 0

        # arch is a string such as 'NRNR': 'N' appends a normal cell,
        # any other letter appends a reduction cell.
        for l in arch:
            if l == 'N':
                normal_no += 1
                type = 'normal'
                cell_no = normal_no
                is_reduction = False
            else:
                reduction_no += 1
                type = 'reduction'
                cell_no = reduction_no
                is_reduction = True
            # Double the channel count after each reduction.
            filters = (2**reduction_no) * init_filters

            if is_reduction:
                node0 = FactorizedReduction(
                    filters, f'{normal_no + reduction_no}_{type}_C{cell_no}_0',
                    data_format)(node0)
                node1 = FactorizedReduction(
                    filters, f'{normal_no + reduction_no}_{type}_C{cell_no}_1',
                    data_format)(node1)
            x = conv_layer(hp_dict, f'{normal_no + reduction_no}_{type}',
                           cell_no, [node0, node1], filters, node_num,
                           is_reduction)
            node0 = node1
            node1 = x
        logit = classification(x, classes, classification_dropout, data_format)
        space.set_inputs(input)
        if weights_cache is not None:
            space.weights_cache = weights_cache

    return space
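
Searchers in Hypernets generally expect a zero-argument callable that returns a fresh, unassigned space, so a parameterized factory like this one is usually wrapped first. A hedged sketch of that wiring, again assuming the random_sample() API:

from functools import partial

search_space_fn = partial(enas_micro_search_space, arch='NRNR', hp_dict={})
space = search_space_fn()   # build one unassigned space
space.random_sample()       # then draw a concrete architecture from it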
Example #5
def get_space():
    space = HyperSpace()
    with space.as_default():
        filters = 64
        in1 = Input(shape=(28, 28, 1))
        # The same input feeds both slots of the cell; hp_dict and
        # conv_cell are defined at module level in the original source.
        conv = conv_cell(hp_dict, 'normal', 0, 0, 'L', [in1, in1],
                         filters)
        space.set_inputs([in1, in1])
        space.set_outputs(conv)
        return space
Example #6
def get_space_p_in_p():
    space = HyperSpace()
    with space.as_default():
        p1 = Pipeline(
            [SimpleImputer(name='imputer1'),
             StandardScaler(name='scaler1')],
            name='p1')
        p2 = Pipeline(
            [SimpleImputer(name='imputer2'),
             StandardScaler(name='scaler2')],
            name='p2')
        input = HyperInput(name='input1')
        # p1 and p2 are nested, unwired pipelines composed inside p3.
        p3 = Pipeline([p1, p2], name='p3')(input)
        space.set_inputs(input)
    return space
Example #7
def get_space_num_cat_pipeline_complex(dataframe_mapper_default=False,
                                       lightgbm_fit_kwargs=None,
                                       xgb_fit_kwargs=None,
                                       catboost_fit_kwargs=None):
    # None defaults avoid sharing one mutable dict across calls.
    lightgbm_fit_kwargs = lightgbm_fit_kwargs or {}
    xgb_fit_kwargs = xgb_fit_kwargs or {}
    catboost_fit_kwargs = catboost_fit_kwargs or {}
    space = HyperSpace()
    with space.as_default():
        input = HyperInput(name='input1')
        p1 = numeric_pipeline_complex()(input)
        p2 = categorical_pipeline_complex()(input)
        # p2 = categorical_pipeline_simple()(input)
        p3 = DataFrameMapper(default=dataframe_mapper_default,
                             input_df=True,
                             df_out=True,
                             df_out_dtype_transforms=[(column_object, 'int')])([p1, p2])

        lightgbm_init_kwargs = {
            'boosting_type': Choice(['gbdt', 'dart', 'goss']),
            'num_leaves': Choice([11, 31, 101, 301, 501]),
            'learning_rate': Real(0.001, 0.1, step=0.005),
            'n_estimators': 100,
            'max_depth': -1,
            'tree_learner': 'data'  # add for dask
            # subsample_for_bin = 200000, objective = None, class_weight = None,
            #  min_split_gain = 0., min_child_weight = 1e-3, min_child_samples = 20,
        }
        lightgbm_est = LightGBMDaskEstimator(task='binary',
                                             fit_kwargs=lightgbm_fit_kwargs,
                                             **lightgbm_init_kwargs)

        xgb_init_kwargs = {
            'tree_method': 'approx'  # add for dask
        }
        xgb_est = XGBoostDaskEstimator(task='binary',
                                       fit_kwargs=xgb_fit_kwargs,
                                       **xgb_init_kwargs)

        # catboost_init_kwargs = {
        #     'silent': True
        # }
        # catboost_est = CatBoostEstimator(task='binary', fit_kwargs=catboost_fit_kwargs, **catboost_init_kwargs)
        # or_est = ModuleChoice([lightgbm_est, xgb_est, catboost_est], name='estimator_options')(p3)

        or_est = ModuleChoice([lightgbm_est, xgb_est],
                              name='estimator_options')(p3)

        space.set_inputs(input)
    return space
Example #8
def get_space_column_transformer():
    space = HyperSpace()
    with space.as_default():
        input = HyperInput(name='input1')
        p1 = Pipeline(
            [SimpleImputer(name='imputer1'),
             StandardScaler(name='scaler1')],
            columns=['a', 'b', 'c'],
            name='p1')(input)
        p2 = Pipeline(
            [SimpleImputer(name='imputer2'),
             StandardScaler(name='scaler2')],
            columns=['c', 'd'],
            name='p2')(input)
        # Recombine the two column-scoped pipelines into one output frame.
        p3 = ColumnTransformer()([p1, p2])
        space.set_inputs(input)
    return space
Example #9
    def __call__(self, *args, **kwargs):
        space = HyperSpace()

        with space.as_default():
            hyper_input = HyperInput(name='input1')

            estimators = []
            if self.enable_dt:
                estimators.append(self.dt)
            if self.enable_dtr:
                estimators.append(self.dtr)
            if self.enable_lr:
                estimators.append(self.lr)
            if self.enable_nn:
                estimators.append(self.nn)

            # Each entry is a kwargs dict (its 'cls' plus init params) that
            # parameterizes one ModuleSpace candidate.
            modules = [ModuleSpace(name=f'{e["cls"].__name__}', **e) for e in estimators]
            outputs = ModuleChoice(modules)(hyper_input)
            space.set_inputs(hyper_input)

        return space
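
The owning class is not shown in this snippet; the enable_* flags and dt/dtr/lr/nn attributes match Hypernets' PlainSearchSpace from hypernets.examples.plain_model, but treat that identification as an assumption. Usage would look roughly like:

space_fn = PlainSearchSpace(enable_dt=True, enable_lr=True)  # assumed class name
space = space_fn()      # __call__ builds a fresh HyperSpace each time
space.random_sample()   # assumes Hypernets' random_sample() API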