def test_assigned_ids(self):
    """Every required interface across components must carry a unique id.

    Loads the ML-Plan unlabeled search space, pulls two pipeline
    components, and checks that each required interface dict has an
    "id" key whose value is not repeated anywhere else.
    """
    search_space = search_space_reader.create_search_space(
        "res/search_space/ml-plan-ul.json"
    )
    c1 = search_space.get_component_by_name("sklearn.pipeline.make_pipeline")
    c2 = search_space.get_component_by_name("sklearn.pipeline.make_union")
    # A set gives O(1) membership checks; the original list made each
    # `not in` test a linear scan.
    used_ids = set()
    for component in (c1, c2):
        for ri in component.get_required_interfaces():
            assert "id" in ri
            assert ri["id"] not in used_ids
            used_ids.add(ri["id"])
def __init__(self, search_config, optimizers):
    """Build the MCTS search graph and remember its root node.

    ``search_config`` supplies file paths, evaluation settings, data,
    and seeds; ``optimizers`` is passed straight through to the graph
    generator.
    """
    self.config = search_config
    cfg = self.config  # local alias for the repeated attribute reads below
    self.search_space = create_search_space(*cfg.search_space_files)
    # Shared flag used to signal pipeline evaluation to stop.
    self.stop_event = Event()
    self.graph_generator = MctsGraphGenerator(
        self.search_space,
        cfg.start_component_name,
        optimizers,
        cfg.pipeline_evaluator_class,
        self.stop_event,
        cfg.timeout_for_pipeline_evaluation,
        cfg.data_x,
        cfg.data_y,
        cfg.seed,
        cfg.numpy_random_state,
    )
    self.root_node = self.graph_generator.get_root_node()
    self.random_selection = cfg.random_node_selection
from frankensteins_automl.search_space import search_space_reader

# Module-level fixture: parse the TPOT classifier search space once and
# share it across all tests in this class.
search_space = search_space_reader.create_search_space(
    "res/search_space/scikit-learn-classifiers-tpot.json")


class TestSearchSpaceReader:
    """Tests for looking up components and interfaces in a parsed search space."""

    def test_component_retrievement(self):
        # A known component is retrievable by its fully qualified name.
        component = search_space.get_component_by_name(
            "sklearn.naive_bayes.GaussianNB")
        assert component.get_name() == "sklearn.naive_bayes.GaussianNB"

    def test_non_existing_component_retrievement(self):
        # Unknown names yield None rather than raising.
        assert search_space.get_component_by_name("abc.def.GHI") is None

    def test_interface_retrievement(self):
        # Collect the names of all components providing "BaseLearner".
        providing_components = search_space.get_components_providing_interface(
            "BaseLearner")
        providing_components_names = []
        for component in providing_components:
            providing_components_names.append(component.get_name())
        # Expected provider names. NOTE(review): no assertion comparing
        # `providing_components_names` with `components` is visible in this
        # chunk — the snippet may be truncated; verify against the full file.
        components = [
            "sklearn.naive_bayes.GaussianNB",
            "sklearn.naive_bayes.BernoulliNB",
            "sklearn.naive_bayes.MultinomialNB",
            "sklearn.tree.DecisionTreeClassifier",
            "sklearn.ensemble.RandomForestClassifier",
            "sklearn.ensemble.GradientBoostingClassifier",
            "sklearn.neighbors.KNeighborsClassifier",
            "sklearn.svm.LinearSVC",
        ]
from time import perf_counter from frankensteins_automl.search_space.search_space_graph import ( SearchSpaceRestProblem, ) from frankensteins_automl.machine_learning.arff_reader import read_arff from frankensteins_automl.machine_learning.pipeline.pipeline_evaluator import ( PipelineEvaluator, ) from frankensteins_automl.search_space.search_space_reader import ( create_search_space, ) search_space = create_search_space( "res/search_space/ml-plan-ul.json", "res/search_space/scikit-learn-classifiers-tpot.json", "res/search_space/scikit-learn-preprocessors-tpot.json", ) c1 = search_space.get_component_by_name("sklearn.pipeline.make_pipeline") c2 = search_space.get_component_by_name("sklearn.preprocessing.Binarizer") c3 = search_space.get_component_by_name("sklearn.tree.DecisionTreeClassifier") required_interfaces = [ { "interface": { "name": "AbstractPreprocessor", "construction_key": 0, "id": c1.get_required_interfaces()[0]["id"], }, "satisfied": True, "component_id": "a3f1fa38-0979-11ea-ba87-309c23b50ce0", "satisfied_with": "a3f20a5a-0979-11ea-ba87-309c23b50ce0", }, { "interface": { "name": "BasicClassifier",
def test_non_json_path(self):
    """A path to a non-JSON file must produce no search space."""
    result = search_space_reader.create_search_space("README.md")
    assert result is None
def test_non_string_path(self):
    """A non-string argument (here an int) must produce no search space."""
    result = search_space_reader.create_search_space(123)
    assert result is None
def test_json_parsing(self):
    """Parsing a valid search-space JSON yields a SearchSpace instance."""
    parsed = search_space_reader.create_search_space(
        "res/search_space/ml-plan-ul.json"
    )
    assert isinstance(parsed, SearchSpace)