Example #1
    def test_forbidden_equals_clause(self):
        hp1 = CategoricalHyperparameter("parent", [0, 1])
        hp2 = UniformIntegerHyperparameter("child", 0, 10)

        self.assertRaisesRegexp(TypeError, "HP1' is not of"
            " type <class 'HPOlibConfigSpace.hyperparameters.Hyperparameter'>.",
                                ForbiddenEqualsClause, "HP1", 1)

        self.assertRaisesRegexp(ValueError,
                                "Forbidden clause must be instantiated with a "
                                "legal hyperparameter value for "
                                "'parent, Type: Categorical, Choices: \{0, "
                                "1\}, Default: 0', but got '2'",
                                ForbiddenEqualsClause, hp1, 2)

        forb1 = ForbiddenEqualsClause(hp1, 1)
        forb1_ = ForbiddenEqualsClause(hp1, 1)
        forb1__ = ForbiddenEqualsClause(hp1, 0)
        forb2 = ForbiddenEqualsClause(hp2, 10)

        self.assertEqual(forb1, forb1_)
        self.assertNotEqual(forb1, "forb1")
        self.assertNotEqual(forb1, forb2)
        self.assertNotEqual(forb1__, forb1)
        self.assertEqual("Forbidden: parent == 1", str(forb1))

        self.assertRaisesRegexp(ValueError,
                                "Is_forbidden must be called with the "
                                "instanstatiated hyperparameter in the "
                                "forbidden clause; you are missing "
                                "'parent'", forb1.is_forbidden,
                                [{1: hp2}])
        self.assertFalse(forb1.is_forbidden({'child': 1}, strict=False))
        self.assertFalse(forb1.is_forbidden({'parent': 0}))
        self.assertTrue(forb1.is_forbidden({'parent': 1}))
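
For orientation, here is a minimal, self-contained sketch of the API this test exercises. The import paths are assumptions based on the HPOlibConfigSpace package named in the expected error messages (the newer ConfigSpace package exposes the same class names):

from HPOlibConfigSpace.configuration_space import ConfigurationSpace
from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause
from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter

# Hypothetical two-value space mirroring the "parent" hyperparameter above.
cs = ConfigurationSpace()
parent = CategoricalHyperparameter("parent", [0, 1])
cs.add_hyperparameter(parent)

# Forbid parent == 1; the clause is evaluated against plain value dicts.
clause = ForbiddenEqualsClause(parent, 1)
cs.add_forbidden_clause(clause)

print(str(clause))                                 # Forbidden: parent == 1
print(clause.is_forbidden({"parent": 1}))          # True
print(clause.is_forbidden({"parent": 0}))          # False
# With strict=False, a dict that does not contain the hyperparameter is
# simply treated as not forbidden instead of raising an error:
print(clause.is_forbidden({"other": 3}, strict=False))   # False
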
Example #2
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        penalty = cs.add_hyperparameter(Constant("penalty", "l1"))
        loss = cs.add_hyperparameter(
            CategoricalHyperparameter("loss", ["hinge", "squared_hinge"],
                                      default="squared_hinge"))
        dual = cs.add_hyperparameter(Constant("dual", "False"))
        # This is set ad-hoc
        tol = cs.add_hyperparameter(
            UniformFloatHyperparameter("tol",
                                       1e-5,
                                       1e-1,
                                       default=1e-4,
                                       log=True))
        C = cs.add_hyperparameter(
            UniformFloatHyperparameter("C",
                                       0.03125,
                                       32768,
                                       log=True,
                                       default=1.0))
        multi_class = cs.add_hyperparameter(Constant("multi_class", "ovr"))
        # These are set ad-hoc
        fit_intercept = cs.add_hyperparameter(Constant("fit_intercept",
                                                       "True"))
        intercept_scaling = cs.add_hyperparameter(
            Constant("intercept_scaling", 1))

        penalty_and_loss = ForbiddenAndConjunction(
            ForbiddenEqualsClause(penalty, "l1"),
            ForbiddenEqualsClause(loss, "hinge"))
        cs.add_forbidden_clause(penalty_and_loss)
        return cs
Example #3
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        C = cs.add_hyperparameter(UniformFloatHyperparameter(
            "C", 0.03125, 32768, log=True, default=1.0))
        loss = cs.add_hyperparameter(CategoricalHyperparameter(
            "loss", ["epsilon_insensitive", "squared_epsilon_insensitive"],
            default="squared_epsilon_insensitive"))
        # Random Guess
        epsilon = cs.add_hyperparameter(UniformFloatHyperparameter(
            name="epsilon", lower=0.001, upper=1, default=0.1, log=True))
        dual = cs.add_hyperparameter(Constant("dual", "False"))
        # These are set ad-hoc
        tol = cs.add_hyperparameter(UniformFloatHyperparameter(
            "tol", 1e-5, 1e-1, default=1e-4, log=True))
        fit_intercept = cs.add_hyperparameter(Constant("fit_intercept", "True"))
        intercept_scaling = cs.add_hyperparameter(Constant(
            "intercept_scaling", 1))

        dual_and_loss = ForbiddenAndConjunction(
            ForbiddenEqualsClause(dual, "False"),
            ForbiddenEqualsClause(loss, "epsilon_insensitive")
        )
        cs.add_forbidden_clause(dual_and_loss)

        return cs
Example #4
 def test_illegal_default_configuration(self):
     cs = ConfigurationSpace()
     hp1 = CategoricalHyperparameter("loss", ["l1", "l2"], default='l1')
     hp2 = CategoricalHyperparameter("penalty", ["l1", "l2"], default='l1')
     cs.add_hyperparameter(hp1)
     cs.add_hyperparameter(hp2)
     forb1 = ForbiddenEqualsClause(hp1, "l1")
     forb2 = ForbiddenEqualsClause(hp2, "l1")
     forb3 = ForbiddenAndConjunction(forb1, forb2)
     # cs.add_forbidden_clause(forb3)
     self.assertRaisesRegexp(
         ValueError, "Configuration:\n"
         "  loss, Value: l1\n  penalty, Value: l1\n"
         "violates forbidden clause \(Forbidden: loss == l1 && Forbidden: "
         "penalty == l1\)", cs.add_forbidden_clause, forb3)
Example #5
    def test_check_forbidden_with_sampled_vector_configuration(self):
        cs = ConfigurationSpace()
        metric = CategoricalHyperparameter("metric", ["minkowski", "other"])
        cs.add_hyperparameter(metric)

        forbidden = ForbiddenEqualsClause(metric, "other")
        cs.add_forbidden_clause(forbidden)
        configuration = Configuration(cs,
                                      vector=np.ones(1,
                                                     dtype=[('metric', int)]))
        self.assertRaisesRegexp(ValueError, "violates forbidden clause",
                                cs._check_forbidden, configuration)
Example #6
 def test_add_forbidden_clause(self):
     cs = ConfigurationSpace()
     hp1 = CategoricalHyperparameter("input1", [0, 1])
     cs.add_hyperparameter(hp1)
     forb = ForbiddenEqualsClause(hp1, 1)
     # TODO add checking whether a forbidden clause makes sense at all
     cs.add_forbidden_clause(forb)
     # TODO add something to properly retrieve the forbidden clauses
     self.assertEqual(
         str(cs), "Configuration space object:\n  "
         "Hyperparameters:\n    input1, "
         "Type: Categorical, Choices: {0, 1}, "
         "Default: 0\n"
         "  Forbidden Clauses:\n"
         "    Forbidden: input1 == 1\n")
Example #7
    def test_and_conjunction(self):
        hp1 = CategoricalHyperparameter("parent", [0, 1])
        hp2 = UniformIntegerHyperparameter("child", 0, 2)
        hp3 = UniformIntegerHyperparameter("child2", 0, 2)
        hp4 = UniformIntegerHyperparameter("child3", 0, 2)

        forb2 = ForbiddenEqualsClause(hp1, 1)
        forb3 = ForbiddenInClause(hp2, range(2, 3))
        forb4 = ForbiddenInClause(hp3, range(2, 3))
        forb5 = ForbiddenInClause(hp4, range(2, 3))

        and1 = ForbiddenAndConjunction(forb2, forb3)
        and2 = ForbiddenAndConjunction(forb2, forb4)
        and3 = ForbiddenAndConjunction(forb2, forb5)

        total_and = ForbiddenAndConjunction(and1, and2, and3)
        self.assertEqual(
            "((Forbidden: parent == 1 && Forbidden: child in {2}) "
            "&& (Forbidden: parent == 1 && Forbidden: child2 in {2}) "
            "&& (Forbidden: parent == 1 && Forbidden: child3 in "
            "{2}))", str(total_and))

        results = [
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, True
        ]

        for i, values in enumerate(
                product(range(2), range(3), range(3), range(3))):
            is_forbidden = total_and.is_forbidden({
                "parent": values[0],
                "child": values[1],
                "child2": values[2],
                "child3": values[3]
            })

            self.assertEqual(results[i], is_forbidden)

            self.assertFalse(total_and.is_forbidden([], strict=False))
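
As the truth table above shows, a ForbiddenAndConjunction only forbids a configuration when every member clause matches, and ForbiddenInClause matches any value from the given collection. A small hedged sketch, again with assumed imports:

from HPOlibConfigSpace.forbidden import (ForbiddenAndConjunction,
                                         ForbiddenEqualsClause,
                                         ForbiddenInClause)
from HPOlibConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                               UniformIntegerHyperparameter)

parent = CategoricalHyperparameter("parent", [0, 1])
child = UniformIntegerHyperparameter("child", 0, 2)

conj = ForbiddenAndConjunction(ForbiddenEqualsClause(parent, 1),
                               ForbiddenInClause(child, [1, 2]))

print(conj.is_forbidden({"parent": 1, "child": 2}))   # True: both clauses hold
print(conj.is_forbidden({"parent": 1, "child": 0}))   # False: child not in {1, 2}
print(conj.is_forbidden({"parent": 0, "child": 2}))   # False: parent != 1
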
Example #8
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        n_clusters = cs.add_hyperparameter(
            UniformIntegerHyperparameter("n_clusters", 2, 400, 25))
        affinity = cs.add_hyperparameter(
            CategoricalHyperparameter("affinity",
                                      ["euclidean", "manhattan", "cosine"],
                                      "euclidean"))
        linkage = cs.add_hyperparameter(
            CategoricalHyperparameter("linkage",
                                      ["ward", "complete", "average"], "ward"))
        pooling_func = cs.add_hyperparameter(
            CategoricalHyperparameter("pooling_func",
                                      ["mean", "median", "max"]))

        affinity_and_linkage = ForbiddenAndConjunction(
            ForbiddenInClause(affinity, ["manhattan", "cosine"]),
            ForbiddenEqualsClause(linkage, "ward"))
        cs.add_forbidden_clause(affinity_and_linkage)
        return cs
Example #9
    def test_add_configuration_space(self):
        cs = ConfigurationSpace()
        hp1 = cs.add_hyperparameter(CategoricalHyperparameter(
            "input1", [0, 1]))
        forb1 = cs.add_forbidden_clause(ForbiddenEqualsClause(hp1, 1))
        hp2 = cs.add_hyperparameter(
            UniformIntegerHyperparameter("child", 0, 10))
        cond = cs.add_condition(EqualsCondition(hp2, hp1, 0))
        cs2 = ConfigurationSpace()
        cs2.add_configuration_space('prefix', cs, delimiter='__')
        self.assertEqual(
            str(cs2), '''Configuration space object:
  Hyperparameters:
    prefix__child, Type: UniformInteger, Range: [0, 10], Default: 5
    prefix__input1, Type: Categorical, Choices: {0, 1}, Default: 0
  Conditions:
    prefix__child | prefix__input1 == 0
  Forbidden Clauses:
    Forbidden: prefix__input1 == 1
''')
Example #10
    def test_check_configuration2(self):
        # Test that hyperparameters which are not active must not be set and
        # that evaluating forbidden clauses does not choke on missing
        # hyperparameters
        cs = ConfigurationSpace()
        classifier = CategoricalHyperparameter(
            "classifier", ["k_nearest_neighbors", "extra_trees"])
        metric = CategoricalHyperparameter("metric", ["minkowski", "other"])
        p = CategoricalHyperparameter("k_nearest_neighbors:p", [1, 2])
        metric_depends_on_classifier = EqualsCondition(metric, classifier,
                                                       "k_nearest_neighbors")
        p_depends_on_metric = EqualsCondition(p, metric, "minkowski")
        cs.add_hyperparameter(metric)
        cs.add_hyperparameter(p)
        cs.add_hyperparameter(classifier)
        cs.add_condition(metric_depends_on_classifier)
        cs.add_condition(p_depends_on_metric)

        forbidden = ForbiddenEqualsClause(metric, "other")
        cs.add_forbidden_clause(forbidden)

        configuration = Configuration(cs, dict(classifier="extra_trees"))
Example #12
    def get_hyperparameter_search_space(cls, include=None, exclude=None,
                                        dataset_properties=None):
        """Create the hyperparameter configuration space.

        Parameters
        ----------
        include : dict (optional, default=None)

        Returns
        -------
        cs : ConfigurationSpace
            The configuration space for this pipeline.
        """
        cs = ConfigurationSpace()

        if dataset_properties is None or not isinstance(dataset_properties, dict):
            dataset_properties = dict()
        if not 'target_type' in dataset_properties:
            dataset_properties['target_type'] = 'classification'
        if dataset_properties['target_type'] != 'classification':
            dataset_properties['target_type'] = 'classification'

        pipeline = cls._get_pipeline()
        cs = cls._get_hyperparameter_search_space(cs, dataset_properties,
                                                  exclude, include, pipeline)

        classifiers = cs.get_hyperparameter('classifier:__choice__').choices
        preprocessors = cs.get_hyperparameter('preprocessor:__choice__').choices
        available_classifiers = pipeline[-1][1].get_available_components(
            dataset_properties)
        available_preprocessors = pipeline[-2][1].get_available_components(
            dataset_properties)

        possible_default_classifier = copy.copy(list(
            available_classifiers.keys()))
        default = cs.get_hyperparameter('classifier:__choice__').default
        del possible_default_classifier[possible_default_classifier.index(default)]

        # A classifier which can itself handle sparse data is forbidden after
        # the densifier, for memory reasons
        for key in classifiers:
            if SPARSE in available_classifiers[key].get_properties()['input']:
                if 'densifier' in preprocessors:
                    while True:
                        try:
                            cs.add_forbidden_clause(
                                ForbiddenAndConjunction(
                                    ForbiddenEqualsClause(
                                        cs.get_hyperparameter(
                                            'classifier:__choice__'), key),
                                    ForbiddenEqualsClause(
                                        cs.get_hyperparameter(
                                            'preprocessor:__choice__'), 'densifier')
                                ))
                            # Success
                            break
                        except ValueError:
                            # Change the default and try again
                            try:
                                default = possible_default_classifier.pop()
                            except IndexError:
                                raise ValueError("Cannot find a legal default configuration.")
                            cs.get_hyperparameter(
                                'classifier:__choice__').default = default

        # Forbid combinations of non-linear models with feature learning,
        # which would take too long:
        classifiers_ = ["adaboost", "decision_tree", "extra_trees",
                        "gradient_boosting", "k_nearest_neighbors",
                        "libsvm_svc", "random_forest", "gaussian_nb",
                        "decision_tree"]
        feature_learning = ["kitchen_sinks", "nystroem_sampler"]

        for c, f in product(classifiers_, feature_learning):
            if c not in classifiers:
                continue
            if f not in preprocessors:
                continue
            while True:
                try:
                    cs.add_forbidden_clause(ForbiddenAndConjunction(
                        ForbiddenEqualsClause(cs.get_hyperparameter(
                            "classifier:__choice__"), c),
                        ForbiddenEqualsClause(cs.get_hyperparameter(
                            "preprocessor:__choice__"), f)))
                    break
                except KeyError:
                    break
                except ValueError as e:
                    # Change the default and try again
                    try:
                        default = possible_default_classifier.pop()
                    except IndexError:
                        raise ValueError(
                            "Cannot find a legal default configuration.")
                    cs.get_hyperparameter(
                        'classifier:__choice__').default = default

        # These combinations won't work: multinomial NB etc. require
        # non-negative input and therefore cannot follow preprocessors that
        # may produce negative values (feature learning, PCA, etc.)
        classifiers_ = ["multinomial_nb"]
        preproc_with_negative_X = ["kitchen_sinks", "pca", "truncatedSVD",
                                   "fast_ica", "kernel_pca", "nystroem_sampler"]

        for c, f in product(classifiers_, preproc_with_negative_X):
            if c not in classifiers:
                continue
            if f not in preprocessors:
                continue
            while True:
                try:
                    cs.add_forbidden_clause(ForbiddenAndConjunction(
                        ForbiddenEqualsClause(cs.get_hyperparameter(
                            "preprocessor:__choice__"), f),
                        ForbiddenEqualsClause(cs.get_hyperparameter(
                            "classifier:__choice__"), c)))
                    break
                except KeyError:
                    break
                except ValueError:
                    # Change the default and try again
                    try:
                        default = possible_default_classifier.pop()
                    except IndexError:
                        raise ValueError(
                            "Cannot find a legal default configuration.")
                    cs.get_hyperparameter(
                        'classifier:__choice__').default = default

        return cs
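
The while/try loops above implement a retry pattern: add_forbidden_clause() raises ValueError whenever the space's current default configuration violates the clause being added (compare Example #4), so the code switches 'classifier:__choice__' to another candidate default and tries again. Stripped to its essentials, the pattern looks roughly like the helper below; the function name and arguments are illustrative, not part of auto-sklearn:

def add_clause_with_default_fallback(cs, clause, choice_name, fallback_defaults):
    # Keep changing the default of the choice hyperparameter until the
    # forbidden clause no longer invalidates the default configuration.
    while True:
        try:
            cs.add_forbidden_clause(clause)
            return
        except ValueError:
            try:
                new_default = fallback_defaults.pop()
            except IndexError:
                raise ValueError("Cannot find a legal default configuration.")
            cs.get_hyperparameter(choice_name).default = new_default
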
Example #13
    def get_hyperparameter_search_space(cls,
                                        include=None,
                                        exclude=None,
                                        dataset_properties=None):
        """Return the configuration space for the CASH problem.

        Parameters
        ----------
        include_estimators : list of str
            If include_estimators is given, only the regressors specified
            are used. Specify them by their module name; e.g., to include
            only the SVM use :python:`include_regressors=['svr']`.
            Cannot be used together with :python:`exclude_regressors`.

        exclude_estimators : list of str
            If exclude_estimators is given, only the regressors specified
            are used. Specify them by their module name; e.g., to include
            all regressors except the SVM use
            :python:`exclude_regressors=['svr']`.
            Cannot be used together with :python:`include_regressors`.

        include_preprocessors : list of str
            If include_preprocessors is given, only the preprocessors specified
            are used. Specify them by their module name; e.g., to include
            only the PCA use :python:`include_preprocessors=['pca']`.
            Cannot be used together with :python:`exclude_preprocessors`.

        exclude_preprocessors : list of str
            If include_preprocessors is given, only the preprocessors specified
            are used. Specify them by their module name; e.g., to include
            all preprocessors except the PCA use
            :python:`exclude_preprocessors=['pca']`.
            Cannot be used together with :python:`include_preprocessors`.

        Returns
        -------
        cs : HPOlibConfigSpace.configuration_space.ConfigurationSpace
            The configuration space describing the SimpleRegressionClassifier.
        """
        cs = ConfigurationSpace()

        if dataset_properties is None or not isinstance(
                dataset_properties, dict):
            dataset_properties = dict()
        if not 'target_type' in dataset_properties:
            dataset_properties['target_type'] = 'regression'
        if dataset_properties['target_type'] != 'regression':
            dataset_properties['target_type'] = 'regression'

        if 'sparse' not in dataset_properties:
            # This dataset is probably dense
            dataset_properties['sparse'] = False

        pipeline = cls._get_pipeline()
        cs = cls._get_hyperparameter_search_space(cs, dataset_properties,
                                                  exclude, include, pipeline)

        regressors = cs.get_hyperparameter('regressor:__choice__').choices
        preprocessors = cs.get_hyperparameter(
            'preprocessor:__choice__').choices
        available_regressors = pipeline[-1][1].get_available_components(
            dataset_properties)
        available_preprocessors = pipeline[-2][1].get_available_components(
            dataset_properties)

        possible_default_regressor = copy.copy(
            list(available_regressors.keys()))
        default = cs.get_hyperparameter('regressor:__choice__').default
        del possible_default_regressor[possible_default_regressor.index(
            default)]

        # A regressor which can itself handle sparse data is forbidden after
        # the densifier (memory issues)
        for key in regressors:
            if SPARSE in available_regressors[key].get_properties(
                    dataset_properties=None)['input']:
                if 'densifier' in preprocessors:
                    while True:
                        try:
                            cs.add_forbidden_clause(
                                ForbiddenAndConjunction(
                                    ForbiddenEqualsClause(
                                        cs.get_hyperparameter(
                                            'regressor:__choice__'), key),
                                    ForbiddenEqualsClause(
                                        cs.get_hyperparameter(
                                            'preprocessor:__choice__'),
                                        'densifier')))
                            break
                        except ValueError:
                            # Change the default and try again
                            try:
                                default = possible_default_regressor.pop()
                            except IndexError:
                                raise ValueError(
                                    "Cannot find a legal default configuration."
                                )
                            cs.get_hyperparameter(
                                'regressor:__choice__').default = default

        # Forbid combinations of tree-based models with feature learning,
        # which would take too long:
        regressors_ = [
            "adaboost", "decision_tree", "extra_trees", "gaussian_process",
            "gradient_boosting", "k_nearest_neighbors", "random_forest"
        ]
        feature_learning_ = ["kitchen_sinks", "kernel_pca", "nystroem_sampler"]

        for r, f in product(regressors_, feature_learning_):
            if r not in regressors:
                continue
            if f not in preprocessors:
                continue
            while True:
                try:
                    cs.add_forbidden_clause(
                        ForbiddenAndConjunction(
                            ForbiddenEqualsClause(
                                cs.get_hyperparameter("regressor:__choice__"),
                                r),
                            ForbiddenEqualsClause(
                                cs.get_hyperparameter(
                                    "preprocessor:__choice__"), f)))
                    break
                except KeyError:
                    break
                except ValueError:
                    # Change the default and try again
                    try:
                        default = possible_default_regressor.pop()
                    except IndexError:
                        raise ValueError(
                            "Cannot find a legal default configuration.")
                    cs.get_hyperparameter(
                        'regressor:__choice__').default = default

        return cs
Example #14
def write(configuration_space):
    if not isinstance(configuration_space, ConfigurationSpace):
        raise TypeError("pcs_parser.write expects an instance of %s, "
                        "you provided '%s'" %
                        (ConfigurationSpace, type(configuration_space)))

    param_lines = six.StringIO()
    condition_lines = six.StringIO()
    forbidden_lines = []
    for hyperparameter in configuration_space.get_hyperparameters():
        # Check if the hyperparameter names are valid SMAC names!
        try:
            pp_param_name.parseString(hyperparameter.name)
        except pyparsing.ParseException:
            raise ValueError("Illegal hyperparameter name for SMAC: %s" %
                             hyperparameter.name)

        # First build params
        if param_lines.tell() > 0:
            param_lines.write("\n")
        if isinstance(hyperparameter, NumericalHyperparameter):
            param_lines.write(build_continuous(hyperparameter))
        elif isinstance(hyperparameter, CategoricalHyperparameter):
            param_lines.write(build_categorical(hyperparameter))
        elif isinstance(hyperparameter, Constant):
            param_lines.write(build_constant(hyperparameter))
        else:
            raise TypeError("Unknown type: %s (%s)" %
                            (type(hyperparameter), hyperparameter))

    for condition in configuration_space.get_conditions():
        if condition_lines.tell() > 0:
            condition_lines.write("\n")
        condition_lines.write(build_condition(condition))

    for forbidden_clause in configuration_space.forbidden_clauses:
        # Convert in-statement into two or more equals statements
        dlcs = forbidden_clause.get_descendant_literal_clauses()
        # First, get all in statements and convert them to equal statements
        in_statements = []
        other_statements = []
        for dlc in dlcs:
            if isinstance(dlc, MultipleValueForbiddenClause):
                if not isinstance(dlc, ForbiddenInClause):
                    raise ValueError("SMAC cannot handle this forbidden "
                                     "clause: %s" % dlc)
                in_statements.append([
                    ForbiddenEqualsClause(dlc.hyperparameter, value)
                    for value in dlc.values
                ])
            else:
                other_statements.append(dlc)

        # Second, create the product of all elements in the IN statements,
        # create a ForbiddenAnd and add all ForbiddenEquals
        if len(in_statements) > 0:
            for i, p in enumerate(product(*in_statements)):
                all_forbidden_clauses = list(p) + other_statements
                f = ForbiddenAndConjunction(*all_forbidden_clauses)
                forbidden_lines.append(build_forbidden(f))
        else:
            forbidden_lines.append(build_forbidden(forbidden_clause))

    if condition_lines.tell() > 0:
        condition_lines.seek(0)
        param_lines.write("\n\n")
        for line in condition_lines:
            param_lines.write(line)

    if len(forbidden_lines) > 0:
        forbidden_lines.sort()
        param_lines.write("\n\n")
        for line in forbidden_lines:
            param_lines.write(line)
            param_lines.write("\n")

    # Check if the default configuration is a valid configuration!

    param_lines.seek(0)
    return param_lines.getvalue()
Example #15
def read(pcs_string, debug=False):
    configuration_space = ConfigurationSpace()
    conditions = []
    forbidden = []

    # some statistics
    ct = 0
    cont_ct = 0
    cat_ct = 0
    line_ct = 0

    for line in pcs_string:
        line_ct += 1

        if "#" in line:
            # It contains a comment
            pos = line.find("#")
            line = line[:pos]

        # Remove quotes and whitespaces at beginning and end
        line = line.replace('"', "").replace("'", "")
        line = line.strip()

        if "|" in line:
            # It's a condition
            try:
                c = pp_condition.parseString(line)
                conditions.append(c)
            except pyparsing.ParseException:
                raise NotImplementedError("Could not parse condition: %s" %
                                          line)
            continue
        if "}" not in line and "]" not in line:
            print("Skipping: %s" % line)
            continue
        if line.startswith("{") and line.endswith("}"):
            forbidden.append(line)
            continue
        if len(line.strip()) == 0:
            continue

        ct += 1
        param = None
        # print "Parsing: " + line

        create = {
            "int": UniformIntegerHyperparameter,
            "float": UniformFloatHyperparameter,
            "categorical": CategoricalHyperparameter
        }

        try:
            param_list = pp_cont_param.parseString(line)
            il = param_list[9:]
            if len(il) > 0:
                il = il[0]
            param_list = param_list[:9]
            name = param_list[0]
            lower = float(param_list[2])
            upper = float(param_list[4])
            paramtype = "int" if "i" in il else "float"
            log = True if "l" in il else False
            default = float(param_list[7])
            param = create[paramtype](name=name,
                                      lower=lower,
                                      upper=upper,
                                      q=None,
                                      log=log,
                                      default=default)
            cont_ct += 1
        except pyparsing.ParseException:
            pass

        try:
            param_list = pp_cat_param.parseString(line)
            name = param_list[0]
            choices = [c for c in param_list[2:-4:2]]
            default = param_list[-2]
            param = create["categorical"](name=name,
                                          choices=choices,
                                          default=default)
            cat_ct += 1
        except pyparsing.ParseException:
            pass

        if param is None:
            raise NotImplementedError("Could not parse: %s" % line)

        configuration_space.add_hyperparameter(param)

    for clause in forbidden:
        # TODO test this properly!
        # TODO Add a try/catch here!
        # noinspection PyUnusedLocal
        param_list = pp_forbidden_clause.parseString(clause)
        tmp_list = []
        clause_list = []
        for value in param_list[1:]:
            if len(tmp_list) < 3:
                tmp_list.append(value)
            else:
                # So far, only equals is supported by SMAC
                if tmp_list[1] == '=':
                    # TODO maybe add a check if the hyperparameter is
                    # actually in the configuration space
                    clause_list.append(
                        ForbiddenEqualsClause(
                            configuration_space.get_hyperparameter(
                                tmp_list[0]), tmp_list[2]))
                else:
                    raise NotImplementedError()
                tmp_list = []
        configuration_space.add_forbidden_clause(
            ForbiddenAndConjunction(*clause_list))

    # Now handle conditions
    # If there are two conditions for one child, these two conditions are an
    # AND-conjunction of conditions, thus we have to connect them
    conditions_per_child = defaultdict(list)
    for condition in conditions:
        child_name = condition[0]
        conditions_per_child[child_name].append(condition)

    for child_name in conditions_per_child:
        condition_objects = []
        for condition in conditions_per_child[child_name]:
            child = configuration_space.get_hyperparameter(child_name)
            parent_name = condition[2]
            parent = configuration_space.get_hyperparameter(parent_name)
            restrictions = condition[5:-1:2]

            # TODO: cast the type of the restriction!
            if len(restrictions) == 1:
                condition = EqualsCondition(child, parent, restrictions[0])
            else:
                condition = InCondition(child, parent, values=restrictions)
            condition_objects.append(condition)

        # Now we have all condition objects for this child, so we can build a
        #  giant AND-conjunction of them (if number of conditions >= 2)!
        if len(condition_objects) > 1:
            and_conjunction = AndConjunction(*condition_objects)
            configuration_space.add_condition(and_conjunction)
        else:
            configuration_space.add_condition(condition_objects[0])

    return configuration_space
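
Taken together, write() (Example #14) and read() (Example #15) allow a round trip through SMAC's pcs format. A hedged usage sketch, assuming both functions are importable from their pcs parser module and using the same assumed hyperparameter/forbidden imports as above; the exact serialized text is not guaranteed here, but a forbidden conjunction should appear as a curly-brace line of the form {penalty=l1, loss=hinge}, which is the shape read() recognizes:

from HPOlibConfigSpace.configuration_space import ConfigurationSpace
from HPOlibConfigSpace.forbidden import (ForbiddenAndConjunction,
                                         ForbiddenEqualsClause)
from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter

cs = ConfigurationSpace()
loss = CategoricalHyperparameter("loss", ["hinge", "squared_hinge"],
                                 default="squared_hinge")
penalty = CategoricalHyperparameter("penalty", ["l1", "l2"], default="l2")
cs.add_hyperparameter(loss)
cs.add_hyperparameter(penalty)
cs.add_forbidden_clause(ForbiddenAndConjunction(
    ForbiddenEqualsClause(penalty, "l1"),
    ForbiddenEqualsClause(loss, "hinge")))

pcs_string = write(cs)                 # write() as defined in Example #14
print(pcs_string)
cs2 = read(pcs_string.split("\n"))     # read() iterates over lines
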
Example #16
    def get_hyperparameter_search_space(cls,
                                        include_estimators=None,
                                        exclude_estimators=None,
                                        include_preprocessors=None,
                                        exclude_preprocessors=None,
                                        dataset_properties=None):
        """Return the configuration space for the CASH problem.

        Parameters
        ----------
        include_estimators : list of str
            If include_estimators is given, only the regressors specified
            are used. Specify them by their module name; e.g., to include
            only the SVM use :python:`include_regressors=['svr']`.
            Cannot be used together with :python:`exclude_regressors`.

        exclude_estimators : list of str
            If exclude_estimators is given, only the regressors specified
            are used. Specify them by their module name; e.g., to include
            all regressors except the SVM use
            :python:`exclude_regressors=['svr']`.
            Cannot be used together with :python:`include_regressors`.

        include_preprocessors : list of str
            If include_preprocessors is given, only the preprocessors specified
            are used. Specify them by their module name; e.g., to include
            only the PCA use :python:`include_preprocessors=['pca']`.
            Cannot be used together with :python:`exclude_preprocessors`.

        exclude_preprocessors : list of str
            If include_preprocessors is given, only the preprocessors specified
            are used. Specify them by their module name; e.g., to include
            all preprocessors except the PCA use
            :python:`exclude_preprocessors=['pca']`.
            Cannot be used together with :python:`include_preprocessors`.

        Returns
        -------
        cs : HPOlibConfigSpace.configuration_space.ConfigurationSpace
            The configuration space describing the AutoSklearnRegressor.
        """
        if include_estimators is not None and exclude_estimators is not None:
            raise ValueError("The arguments include_estimators and "
                             "exclude_regressors cannot be used together.")

        if include_preprocessors is not None and exclude_preprocessors is not None:
            raise ValueError("The arguments include_preprocessors and "
                             "exclude_preprocessors cannot be used together.")

        if dataset_properties is None or not isinstance(
                dataset_properties, dict):
            dataset_properties = dict()

        # Compile a list of all estimator objects for this problem
        available_regressors = AutoSklearnRegressor._get_estimator_components()

        # We assume that there exists only a single regression task, which
        # is different from classification, where we have multiclass,
        # multilabel, etc.
        regressors = dict()
        for name in available_regressors:
            if include_estimators is not None and \
                            name not in include_estimators:
                continue
            elif exclude_estimators is not None and \
                            name in exclude_estimators:
                continue
            if dataset_properties.get('sparse') is True and \
                    available_regressors[name].get_properties()[
                        'handles_sparse'] is False:
                continue
            regressors[name] = available_regressors[name]

        if len(regressors) == 0:
            raise ValueError("No regressors to build a configuration space "
                             "for...")

        # Hardcode the defaults based on some educated guesses
        classifier_defaults = [
            'random_forest', 'liblinear', 'sgd', 'libsvm_svc'
        ]
        regressor_default = None
        for cd_ in classifier_defaults:
            if cd_ in regressors:
                regressor_default = cd_
                break
        if regressor_default is None:
            regressor_default = regressors.keys()[0]

        # Compile a list of preprocessors for this problem
        available_preprocessors = \
            components.preprocessing_components._preprocessors

        preprocessors = dict()
        for name in available_preprocessors:
            if name in AutoSklearnRegressor._pipeline:
                preprocessors[name] = available_preprocessors[name]
                continue
            elif include_preprocessors is not None and \
                            name not in include_preprocessors:
                continue
            elif exclude_preprocessors is not None and \
                            name in exclude_preprocessors:
                continue

            if dataset_properties.get('sparse') is True and \
                    available_preprocessors[name].get_properties()[
                                'handles_sparse'] is False:
                continue
            elif dataset_properties.get('sparse') is False and \
                    available_preprocessors[name].get_properties()[
                                'handles_dense'] is False:
                continue
            elif available_preprocessors[name]. \
                    get_properties()['handles_regression'] is False:
                continue

            preprocessors[name] = available_preprocessors[name]

        # Get the configuration space
        configuration_space = super(AutoSklearnRegressor, cls).\
            get_hyperparameter_search_space(
            cls._get_estimator_hyperparameter_name(),
            regressor_default, regressors, preprocessors, dataset_properties,
            cls._pipeline, )

        # And now add forbidden parameter configurations which would take too
        # long

        # Combinations of tree-based models with feature learning:
        regressors_ = [
            "random_forest", "gradient_boosting", "gaussian_process"
        ]
        feature_learning_ = ["kitchen_sinks", "sparse_filtering"]

        for c, f in product(regressors_, feature_learning_):
            try:
                configuration_space.add_forbidden_clause(
                    ForbiddenAndConjunction(
                        ForbiddenEqualsClause(
                            configuration_space.get_hyperparameter(
                                "regressor"), c),
                        ForbiddenEqualsClause(
                            configuration_space.get_hyperparameter(
                                "preprocessor"), f)))
            except:
                pass

        return configuration_space
Example #17
    def get_hyperparameter_search_space(cls,
                                        include_estimators=None,
                                        exclude_estimators=None,
                                        include_preprocessors=None,
                                        exclude_preprocessors=None,
                                        dataset_properties=None):

        if include_estimators is not None and exclude_estimators is not None:
            raise ValueError("The arguments include_estimators and "
                             "exclude_estimators cannot be used together.")

        if include_preprocessors is not None and exclude_preprocessors is not None:
            raise ValueError("The arguments include_preprocessors and "
                             "exclude_preprocessors cannot be used together.")

        if dataset_properties is None or not isinstance(
                dataset_properties, dict):
            dataset_properties = dict()

        # Compile a list of all estimator objects for this problem
        available_classifiers = AutoSklearnClassifier._get_estimator_components(
        )

        classifiers = dict()
        for name in available_classifiers:
            if include_estimators is not None and \
                            name not in include_estimators:
                continue
            elif exclude_estimators is not None and \
                            name in exclude_estimators:
                continue

            if dataset_properties.get('multiclass') is True and \
                    available_classifiers[name].get_properties()[
                        'handles_multiclass'] is False:
                continue
            if dataset_properties.get('multilabel') is True and \
                    available_classifiers[name].get_properties()[
                        'handles_multilabel'] is False:
                continue
            if dataset_properties.get('sparse') is True and \
                    available_classifiers[name].get_properties()[
                        'handles_sparse'] is False:
                continue
            classifiers[name] = available_classifiers[name]

        if len(classifiers) == 0:
            raise ValueError("No classifier to build a configuration space "
                             "for...")

        # Hardcode the defaults based on some educated guesses
        classifier_defaults = [
            'random_forest', 'liblinear', 'sgd', 'libsvm_svc'
        ]
        classifier_default = None
        for cd_ in classifier_defaults:
            if cd_ in classifiers:
                classifier_default = cd_
                break
        if classifier_default is None:
            classifier_default = classifiers.keys()[0]

        # Compile a list of preprocessors for this problem
        available_preprocessors = \
            components.preprocessing_components._preprocessors

        preprocessors = dict()
        for name in available_preprocessors:
            if name in cls._get_pipeline():
                preprocessors[name] = available_preprocessors[name]
                continue
            elif include_preprocessors is not None and \
                            name not in include_preprocessors:
                continue
            elif exclude_preprocessors is not None and \
                            name in exclude_preprocessors:
                continue

            if available_preprocessors[name]. \
                    get_properties()['handles_classification'] is False:
                continue
            if dataset_properties.get('multiclass') is True and \
                    available_preprocessors[name].get_properties()[
                                'handles_multiclass'] is False:
                continue
            if dataset_properties.get('multilabel') is True and \
                    available_preprocessors[name].get_properties()[
                                'handles_multilabel'] is False:
                continue
            if dataset_properties.get('sparse') is True and \
                    available_preprocessors[name].get_properties()[
                                'handles_sparse'] is False:
                continue
            elif dataset_properties.get('sparse') is False and \
                    available_preprocessors[name].get_properties()[
                                'handles_dense'] is False:
                continue

            preprocessors[name] = available_preprocessors[name]

        # Get the configuration space
        configuration_space = super(AutoSklearnClassifier, cls)\
            .get_hyperparameter_search_space(
            cls._get_estimator_hyperparameter_name(),
            classifier_default, classifiers, preprocessors, dataset_properties,
            cls._get_pipeline())

        # And now add forbidden parameter configurations which would take too
        # long

        # Combinations of tree-based models with feature learning:
        classifiers_ = [
            "extra_trees", "gradient_boosting", "k_nearest_neighbors",
            "libsvm_svc", "random_forest"
        ]
        feature_learning_ = ["kitchen_sinks", "sparse_filtering"]

        for c, f in product(classifiers_, feature_learning_):
            try:
                configuration_space.add_forbidden_clause(
                    ForbiddenAndConjunction(
                        ForbiddenEqualsClause(
                            configuration_space.get_hyperparameter(
                                "classifier"), c),
                        ForbiddenEqualsClause(
                            configuration_space.get_hyperparameter(
                                "preprocessor"), f)))
            except:
                pass

        return configuration_space
Example #18
    def get_hyperparameter_search_space(cls, estimator_name,
                                         default_estimator,
                                         estimator_components,
                                         preprocessor_components,
                                         dataset_properties,
                                         always_active):
        """Return the configuration space for the CASH problem.

        This method should be called by the method
        get_hyperparameter_search_space of a subclass. After the subclass
        assembles a list of available estimators and preprocessor components,
        _get_hyperparameter_search_space can be called to do the work of
        creating the actual
        HPOlibConfigSpace.configuration_space.ConfigurationSpace object.

        Parameters
        ----------
        estimator_name : str
            Name of the estimator hyperparameter which will be used in the
            configuration space. For a classification task, this would be
            'classifier'.

        estimator_components : dict {name: component}
            Dictionary with all estimator components to be included in the
            configuration space.

        preprocessor_components : dict {name: component}
            Dictionary with all preprocessor components to be included in the
            configuration space.

        always_active : list of str
            A list of components which will always be active in the pipeline.
            This is useful for components like imputation which have
            hyperparameters to be configured, but which do not have any parent.

        default_estimator : str
            Default value for the estimator hyperparameter.

        Returns
        -------
        cs : HPOlibConfigSpace.configuration_space.ConfigurationSpace
            The configuration space describing the AutoSklearnClassifier.

        """

        cs = ConfigurationSpace()

        available_estimators = estimator_components
        available_preprocessors = preprocessor_components

        if default_estimator is None:
            default_estimator = available_estimators.keys()[0]

        estimator = CategoricalHyperparameter(estimator_name,
            available_estimators.keys(), default=default_estimator)
        cs.add_hyperparameter(estimator)
        for name in available_estimators.keys():

            # We have to retrieve the configuration space every time because
            # we change the objects it returns. If we reused it, we could not
            # retrieve the conditions further down
            # TODO implement copy for hyperparameters and forbidden and
            # conditions!

            estimator_configuration_space = available_estimators[name]. \
                get_hyperparameter_search_space(dataset_properties)
            for parameter in estimator_configuration_space.get_hyperparameters():
                new_parameter = copy.deepcopy(parameter)
                new_parameter.name = "%s:%s" % (name, new_parameter.name)
                cs.add_hyperparameter(new_parameter)
                # We must only add a condition if the hyperparameter is not
                # conditional on something else
                if len(estimator_configuration_space.
                        get_parents_of(parameter)) == 0:
                    condition = EqualsCondition(new_parameter, estimator, name)
                    cs.add_condition(condition)

            for condition in available_estimators[name]. \
                    get_hyperparameter_search_space(dataset_properties).get_conditions():
                dlcs = condition.get_descendant_literal_conditions()
                for dlc in dlcs:
                    if not dlc.child.name.startswith(name):
                        dlc.child.name = "%s:%s" % (name, dlc.child.name)
                    if not dlc.parent.name.startswith(name):
                        dlc.parent.name = "%s:%s" % (name, dlc.parent.name)
                cs.add_condition(condition)

            for forbidden_clause in available_estimators[name]. \
                    get_hyperparameter_search_space(dataset_properties).forbidden_clauses:
                dlcs = forbidden_clause.get_descendant_literal_clauses()
                for dlc in dlcs:
                    if not dlc.hyperparameter.name.startswith(name):
                        dlc.hyperparameter.name = "%s:%s" % (name,
                                                             dlc.hyperparameter.name)
                cs.add_forbidden_clause(forbidden_clause)

        preprocessor_choices = filter(lambda app: app not in always_active,
                                      available_preprocessors.keys())
        preprocessor = CategoricalHyperparameter("preprocessor",
            ["None"] + preprocessor_choices, default='None')
        cs.add_hyperparameter(preprocessor)
        for name in available_preprocessors.keys():
            preprocessor_configuration_space = available_preprocessors[name]. \
                get_hyperparameter_search_space(dataset_properties)
            for parameter in preprocessor_configuration_space.get_hyperparameters():
                new_parameter = copy.deepcopy(parameter)
                new_parameter.name = "%s:%s" % (name, new_parameter.name)
                cs.add_hyperparameter(new_parameter)
                # We must only add a condition if the hyperparameter is not
                # conditional on something else
                if len(preprocessor_configuration_space.
                        get_parents_of(
                        parameter)) == 0 and name not in always_active:
                    condition = EqualsCondition(new_parameter, preprocessor,
                                                name)
                    cs.add_condition(condition)

            for condition in available_preprocessors[name]. \
                    get_hyperparameter_search_space(dataset_properties).get_conditions():
                dlcs = condition.get_descendant_literal_conditions()
                for dlc in dlcs:
                    if not dlc.child.name.startswith(name):
                        dlc.child.name = "%s:%s" % (name, dlc.child.name)
                    if not dlc.parent.name.startswith(name):
                        dlc.parent.name = "%s:%s" % (name, dlc.parent.name)
                cs.add_condition(condition)

            for forbidden_clause in available_preprocessors[name]. \
                    get_hyperparameter_search_space(dataset_properties).forbidden_clauses:
                dlcs = forbidden_clause.get_descendant_literal_clauses()
                for dlc in dlcs:
                    if not dlc.hyperparameter.name.startswith(name):
                        dlc.hyperparameter.name = "%s:%s" % (name,
                                                             dlc.hyperparameter.name)
                cs.add_forbidden_clause(forbidden_clause)

        # Now try to add combinations that we know do not work
        try:
            cs.add_forbidden_clause(ForbiddenAndConjunction(
                ForbiddenEqualsClause(cs.get_hyperparameter(
                    "select_percentile_classification:score_func"), "chi2"),
                ForbiddenEqualsClause(cs.get_hyperparameter(
                    "rescaling:strategy"), "standard")
            ))
        except:
            pass

        return cs
Example #19
def add_forbidden(conf_space, pipeline, matches, dataset_properties, include,
                  exclude):
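    # Scan every chain of consecutive "choice" nodes in the pipeline; for each
    # combination of component choices that has no valid entry left in the
    # `matches` array, add a ForbiddenAndConjunction of ForbiddenEqualsClauses
    # over the corresponding "<node>:__choice__" hyperparameters so that the
    # combination can never be sampled.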
    # Not sure if this works for 3D
    node_i_is_choice = []
    node_i_choices_names = []
    node_i_choices = []
    all_nodes = []
    for node_name, node in pipeline:
        all_nodes.append(node)
        is_choice = hasattr(node, "get_available_components")
        node_i_is_choice.append(is_choice)

        node_include = include.get(node_name) if include is not None else None
        node_exclude = exclude.get(node_name) if exclude is not None else None

        if is_choice:
            node_i_choices_names.append(
                node.get_available_components(dataset_properties,
                                              include=node_include,
                                              exclude=node_exclude).keys())
            node_i_choices.append(
                node.get_available_components(dataset_properties,
                                              include=node_include,
                                              exclude=node_exclude).values())

        else:
            node_i_choices_names.append([node_name])
            node_i_choices.append([node])

    # Find out all chains of choices. Only within such a chain is it possible
    # to have several forbidden constraints
    choices_chains = []
    idx = 0
    while idx < len(pipeline):
        if node_i_is_choice[idx]:
            chain_start = idx
            idx += 1
            while idx < len(pipeline) and node_i_is_choice[idx]:
                idx += 1
            chain_stop = idx
            choices_chains.append((chain_start, chain_stop))
        idx += 1

    for choices_chain in choices_chains:
        constraints = set()

        chain_start = choices_chain[0]
        chain_stop = choices_chain[1]
        chain_length = chain_stop - chain_start

        # Add one to also have chain_length in the range
        for sub_chain_length in range(2, chain_length + 1):
            for start_idx in range(chain_start,
                                   chain_stop - sub_chain_length + 1):
                indices = range(start_idx, start_idx + sub_chain_length)
                node_names = [pipeline[idx][0] for idx in indices]

                num_node_choices = []
                node_choice_names = []
                skip_array_shape = []

                for idx in indices:
                    node = all_nodes[idx]
                    available_components = node.get_available_components(
                        dataset_properties, include=node_i_choices_names[idx])
                    assert len(available_components) > 0, len(
                        available_components)
                    skip_array_shape.append(len(available_components))
                    num_node_choices.append(range(len(available_components)))
                    node_choice_names.append(
                        [name for name in available_components])

                # Figure out which choices were already abandoned
                skip_array = np.zeros(skip_array_shape)
                for product in itertools.product(*num_node_choices):
                    for node_idx, choice_idx in enumerate(product):
                        node_idx += start_idx
                        slices_ = [
                            slice(None) if idx != node_idx else slice(
                                choice_idx, choice_idx + 1)
                            for idx in range(len(matches.shape))
                        ]

                        if np.sum(matches[tuple(slices_)]) == 0:
                            skip_array[product] = 1

                for product in itertools.product(*num_node_choices):
                    if skip_array[product]:
                        continue

                    slices = []
                    for idx in range(len(matches.shape)):
                        if idx not in indices:
                            slices.append(slice(None))
                        else:
                            slices.append(
                                slice(product[idx - start_idx],
                                      product[idx - start_idx] + 1))

                    # This prints the affected nodes
                    # print [node_choice_names[i][product[i]]
                    #        for i in range(len(product))], \
                    #     np.sum(matches[slices])

                    if np.sum(matches[tuple(slices)]) == 0:
                        constraint = tuple([(node_names[i],
                                             node_choice_names[i][product[i]])
                                            for i in range(len(product))])

                        # Check if a more general constraint/forbidden clause
                        #  was already added
                        continue_ = False
                        for constraint_length in range(2, len(constraint)):
                            for constraint_start_idx in range(
                                    len(constraint) - constraint_length + 1):
                                sub_constraint = constraint[
                                    constraint_start_idx:constraint_start_idx +
                                    constraint_length]
                                if sub_constraint in constraints:
                                    continue_ = True
                                    break
                            if continue_:
                                break
                        if continue_:
                            continue

                        constraints.add(constraint)

                        forbiddens = []
                        for i in range(len(product)):
                            forbiddens.append(
                                ForbiddenEqualsClause(
                                    conf_space.get_hyperparameter(
                                        node_names[i] + ":__choice__"),
                                    node_choice_names[i][product[i]]))
                        forbidden = ForbiddenAndConjunction(*forbiddens)
                        conf_space.add_forbidden_clause(forbidden)

    return conf_space
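

# A minimal, self-contained sketch (hypothetical node and choice names, not
# taken from an actual pipeline) of the core step in add_forbidden() above:
# every all-zero cell of the `matches` array for a chain of two choice nodes
# becomes one ForbiddenAndConjunction over the two ":__choice__"
# hyperparameters.
import itertools

import numpy as np

matches = np.array([[1, 0, 1],    # rows: preprocessor choices
                    [1, 1, 0]])   # columns: classifier choices
node_names = ["preprocessor", "classifier"]
choice_names = [["pre_a", "pre_b"], ["clf_a", "clf_b", "clf_c"]]

for idx in itertools.product(range(matches.shape[0]), range(matches.shape[1])):
    if matches[idx] == 0:
        print("Forbidden: %s:__choice__ == '%s' && %s:__choice__ == '%s'" %
              (node_names[0], choice_names[0][idx[0]],
               node_names[1], choice_names[1][idx[1]]))
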
Ejemplo n.º 20
0
def read(pcs_string, debug=False):
    configuration_space = ConfigurationSpace()
    conditions = []
    forbidden = []

    # some statistics
    ct = 0
    cont_ct = 0
    cat_ct = 0
    line_ct = 0

    for line in pcs_string:
        line_ct += 1

        if "#" in line:
            # It contains a comment
            pos = line.find("#")
            line = line[:pos]

        # Remove quotes and whitespaces at beginning and end
        line = line.replace('"', "").replace("'", "")
        line = line.strip()

        if "|" in line:
            # It's a condition
            try:
                c = pp_condition.parseString(line)
                conditions.append(c)
            except pyparsing.ParseException:
                raise NotImplementedError("Could not parse condition: %s" %
                                          line)
            continue
        if "}" not in line and "]" not in line:
            print "Skipping: %s" % line
            continue
        if line.startswith("{") and line.endswith("}"):
            forbidden.append(line)
            continue
        if len(line.strip()) == 0:
            continue

        ct += 1
        param = None
        # print "Parsing: " + line

        create = {
            "int": UniformIntegerHyperparameter,
            "float": UniformFloatHyperparameter,
            "categorical": CategoricalHyperparameter
        }

        try:
            param_list = pp_cont_param.parseString(line)
            il = param_list[9:]
            if len(il) > 0:
                il = il[0]
            param_list = param_list[:9]
            name = param_list[0]
            lower = float(param_list[2])
            upper = float(param_list[4])
            paramtype = "int" if "i" in il else "float"
            log = True if "l" in il else False
            default = float(param_list[7])
            param = create[paramtype](name=name,
                                      lower=lower,
                                      upper=upper,
                                      q=None,
                                      log=log,
                                      default=default)
            cont_ct += 1
        except pyparsing.ParseException:
            pass

        try:
            param_list = pp_cat_param.parseString(line)
            name = param_list[0]
            choices = [c for c in param_list[2:-4:2]]
            default = param_list[-2]
            param = create["categorical"](name=name,
                                          choices=choices,
                                          default=default)
            cat_ct += 1
        except pyparsing.ParseException:
            pass

        if param is None:
            raise NotImplementedError("Could not parse: %s" % line)

        configuration_space.add_hyperparameter(param)

    for clause in forbidden:
        # TODO test this properly!
        # TODO Add a try/catch here!
        # noinspection PyUnusedLocal
        param_list = pp_forbidden_clause.parseString(clause)
        tmp_list = []
        clause_list = []
        for value in param_list[1:]:
            if len(tmp_list) < 3:
                tmp_list.append(value)
            else:
                # So far, only equals is supported by SMAC
                if tmp_list[1] == '=':
                    # TODO maybe add a check if the hyperparameter is
                    # actually in the configuration space
                    clause_list.append(
                        ForbiddenEqualsClause(
                            configuration_space.get_hyperparameter(
                                tmp_list[0]), tmp_list[2]))
                else:
                    raise NotImplementedError()
                tmp_list = []
        configuration_space.add_forbidden_clause(
            ForbiddenAndConjunction(*clause_list))

    # Now handle conditions
    for condition in conditions:
        child_name = condition[0]
        child = configuration_space.get_hyperparameter(child_name)
        parent_name = condition[2]
        parent = configuration_space.get_hyperparameter(parent_name)
        restrictions = condition[5:-1:2]

        # TODO: cast the type of the restriction!
        if len(restrictions) == 1:
            condition = EqualsCondition(child, parent, restrictions[0])
        else:
            condition = InCondition(child, parent, values=restrictions)

        configuration_space.add_condition(condition)

    if debug:
        hyperparameters = configuration_space.get_hyperparameters()
        print()
        print("============== Reading Results")
        print("First 10 hyperparameters:")
        sp_list = ["%s: %s" % (hp.name, str(hp)) for hp in hyperparameters]
        print("\n".join(sp_list[:10]))
        print()
        print("#Invalid lines: %d ( of %d )" % (line_ct - len(conditions) - ct,
                                                line_ct))
        print("#Parameters: %d" % len(hyperparameters))
        print("#Conditions: %d" % len(conditions))
        print("#Conditioned params: %d" %
              len(set(cond[0] for cond in conditions)))
        print("#Categorical: %d" % cat_ct)
        print("#Continuous: %d" % cont_ct)
    return configuration_space