Example #1
0
def load_pipeline(pipeline_file: typing.Union[str, typing.Dict]):
    """
    Load pipeline from a pipeline URI or an already-parsed JSON structure.

    Parameters
    ----------
    pipeline_file: Union[str, dict]
        The URI pointing to a json file of pipeline or dict of string that is a pipeline

    Returns
    -------
    pipeline: Pipeline
        An object of Pipeline, or None if a dict input fails to parse.

    """
    if isinstance(pipeline_file, dict):
        try:
            with d3m_utils.silence():
                pipeline = Pipeline.from_json_structure(pipeline_file)
        except Exception:
            # Best-effort parse: an invalid structure yields None. Narrowed from
            # a bare `except:` so SystemExit/KeyboardInterrupt are not swallowed.
            pipeline = None
    else:
        with d3m_utils.silence():
            pipeline = get_pipeline(pipeline_path=pipeline_file,
                                    load_all_primitives=False)
    return pipeline
Example #2
0
def available_primitives():
    """
    Enumerate installed d3m primitives that are not block-listed.

    Returns
    -------
    primitives_info : list of dict
        One dict per primitive with keys 'id', 'version', 'python_path',
        'name' and 'digest' (digest is None when not present in metadata).
    """
    primitives_info = []

    with d3m_utils.silence():
        for primitive_path in d3m_index.search():
            if primitive_path in PrimitivesList.BlockList:
                continue

            try:
                primitive = d3m_index.get_primitive(primitive_path)
                # Query the metadata once instead of five separate calls.
                metadata = primitive.metadata.query()
                primitive_info = {
                    'id': metadata['id'],
                    'version': metadata['version'],
                    'python_path': metadata['python_path'],
                    'name': metadata['name'],
                    'digest': metadata.get('digest', None),
                }
                primitives_info.append(primitive_info)
            except Exception:
                # Skip primitives that fail to load or have incomplete
                # metadata; narrowed from a bare `except:`.
                continue
    return primitives_info
Example #3
0
 def setUpClass(cls):
     """Register the test primitives used by this test case."""
     test_primitives = (
         ('d3m.primitives.regression.monomial.Test', MonomialPrimitive),
         ('d3m.primitives.data_generation.random.Test', RandomPrimitive),
         ('d3m.primitives.operator.sum.Test', SumPrimitive),
         ('d3m.primitives.operator.increment.Test', IncrementPrimitive),
     )
     # To hide any logging or stdout output.
     with d3m_utils.silence():
         for python_path, primitive in test_primitives:
             index.register_primitive(python_path, primitive)
Example #4
0
    def produce_pipeline(
            self, data_handler, fitted_pipeline_id: str, input_data_id: str, *,
            timeout: float = None, expose_outputs: bool = False
    ) -> PipelineResult:
        """Run produce on an already-fitted pipeline and wrap the outcome.

        Returns a PipelineResult whose status is "COMPLETED" or "ERRORED".
        """
        pipeline_result = PipelineResult(fitted_pipeline_id=fitted_pipeline_id)
        pipeline_result.status = "RUNNING"
        pipeline_result.method_called = "produce"
        pipeline_result.fitted_pipeline_id = fitted_pipeline_id

        # Fetch the input data from the (remote) data handler.
        input_data = ray.get(data_handler.get_data.remote(input_data_id))

        with d3m_utils.silence():
            output, result = runtime_module.produce(
                fitted_pipeline=self.fitted_pipelines[fitted_pipeline_id],
                test_inputs=input_data,
                expose_produced_outputs=expose_outputs,
            )

        if result.has_error():
            pipeline_result.status = "ERRORED"
            pipeline_result.error = result.error
        else:
            pipeline_result.status = "COMPLETED"
            if self.store_results:
                # Persist values to the scratch directory, keep only references.
                pipeline_result.exposed_outputs = save_exposed_values(
                    result.values, fitted_pipeline_id, self.scratch_dir)
                pipeline_result.output = save_exposed_values(
                    output, fitted_pipeline_id, self.scratch_dir)
            else:
                pipeline_result.exposed_outputs = result.values
                pipeline_result.output = output

        if self.store_results:
            pipeline_result.pipeline_run = save_pipeline_run(
                result.pipeline_run, self.scratch_dir)

        return pipeline_result
Example #5
0
    def test_env_vars(self):
        """RuntimeEnvironment must pick up image names/digests from env vars."""
        self._set_env_vars()
        try:
            with utils.silence():
                env = RuntimeEnvironment()

            # (section, field, env var, failure message) — one row per check.
            checks = (
                ('base_docker_image', 'image_name', D3M_BASE_IMAGE_NAME,
                 'base_image_name incorrectly extracted from environment variables'),
                ('base_docker_image', 'image_digest', D3M_BASE_IMAGE_DIGEST,
                 'base_image_digest incorrectly extracted from environment variables'),
                ('docker_image', 'image_name', D3M_IMAGE_NAME,
                 'image_name incorrectly extracted from environment variables'),
                ('docker_image', 'image_digest', D3M_IMAGE_DIGEST,
                 'image_digest incorrectly extracted from environment variables'),
            )
            for section, field, env_var, message in checks:
                self.assertEqual(env[section][field], os.environ[env_var], message)

        finally:
            self._unset_env_vars()
Example #6
0
def create_primitive(primitive_id, python_path):
    """Build and return a minimal transformer primitive class.

    The class carries the given metadata id and python path; its produce
    method is a stub.
    """
    # Silence any validation warnings.
    with utils.silence():

        class Primitive(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyperparams]):
            metadata = metadata_base.PrimitiveMetadata({
                'id': primitive_id,
                'version': '0.1.0',
                'name': "Test Primitive",
                'python_path': python_path,
                'algorithm_types': [
                    metadata_base.PrimitiveAlgorithmType.PRINCIPAL_COMPONENT_ANALYSIS,
                ],
                'primitive_family': metadata_base.PrimitiveFamily.FEATURE_SELECTION,
            })

            def produce(self, *, inputs: Inputs, timeout: float = None,
                        iterations: int = None) -> base.CallResult[Outputs]:
                pass

        return Primitive
Example #7
0
def random_values(hyperparameters, seed_state, tried_so_far, max_collisions):
    """Sample a random hyper-parameter configuration not seen before.

    Returns (values, new_seed_state), or None when more than
    `max_collisions` consecutive draws duplicated an entry in
    `tried_so_far` (which is updated in place).
    """
    collisions = 0
    while True:
        # Draw a fresh random value for every active hyper-parameter.
        hps = HyperParameters()
        with d3m_utils.silence():
            for hp in hyperparameters.space:
                hps.merge([hp])
                if hps.is_active(hp):  # Only active params in `values`.
                    hps.values[hp.name] = hp.random_sample(seed_state)
                    seed_state += 1
        # Pick out the invalid hyper-parameters
        patch_invalid_hyperamaeters(hps)

        values = hps.values
        values_hash = compute_values_hash(values)
        # Accept the draw only if this configuration is new; otherwise retry
        # until the collision budget runs out.
        if values_hash not in tried_so_far:
            tried_so_far.add(values_hash)
            return values, seed_state
        collisions += 1
        if collisions > max_collisions:
            return None
Example #8
0
 def test_deterministic_id(self):
     """The environment id must be reproducible from the remaining fields."""
     with utils.silence():
         env = RuntimeEnvironment()
     reported_id = env['id']
     # Remove the id so the hash covers exactly the other fields.
     del env['id']
     recomputed_id = utils.compute_hash_id(env)
     self.assertEqual(reported_id, recomputed_id,
                      'environment.id not deterministically generated')
    def test_inputs_across_samples(self):
        """Misuse of the `inputs_across_samples` decorator must fail primitive validation."""
        # Case 1: the decorated argument name does not exist on the method.
        with self.assertRaisesRegex(exceptions.InvalidPrimitiveCodeError, 'Method \'.*\' has an argument \'.*\' set as computing across samples, but it does not exist'):
            # Silence any validation warnings.
            with utils.silence():
                class TestPrimitive(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyperparams]):
                    metadata = metadata_base.PrimitiveMetadata({
                        'id': '67568a80-dec2-4597-a10f-39afb13d3b9c',
                        'version': '0.1.0',
                        'name': "Test Primitive",
                        'python_path': 'd3m.primitives.test.TestPrimitive',
                        'algorithm_types': [
                            metadata_base.PrimitiveAlgorithmType.NUMERICAL_METHOD,
                        ],
                        'primitive_family': metadata_base.PrimitiveFamily.OPERATOR,
                        'hyperparams_to_tune': [
                            'foobar',
                        ]
                    })

                    # 'foobar' is not an argument of `produce` at all.
                    @base.inputs_across_samples('foobar')
                    def produce(self, *, inputs: Inputs, timeout: float = None, iterations: int = None) -> base.CallResult[Outputs]:
                        pass

        # Case 2: the argument exists but is not a PIPELINE argument
        # (per the expected error message).
        with self.assertRaisesRegex(exceptions.InvalidPrimitiveCodeError, 'Method \'.*\' has an argument \'.*\' set as computing across samples, but it is not a PIPELINE argument'):
            # Silence any validation warnings.
            with utils.silence():
                class TestPrimitive(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyperparams]):
                    metadata = metadata_base.PrimitiveMetadata({
                        'id': '67568a80-dec2-4597-a10f-39afb13d3b9c',
                        'version': '0.1.0',
                        'name': "Test Primitive",
                        'python_path': 'd3m.primitives.test.TestPrimitive',
                        'algorithm_types': [
                            metadata_base.PrimitiveAlgorithmType.NUMERICAL_METHOD,
                        ],
                        'primitive_family': metadata_base.PrimitiveFamily.OPERATOR,
                        'hyperparams_to_tune': [
                            'foobar',
                        ]
                    })

                    # 'timeout' exists on `produce` but is a control argument.
                    @base.inputs_across_samples('timeout')
                    def produce(self, *, inputs: Inputs, timeout: float = None, iterations: int = None) -> base.CallResult[Outputs]:
                        pass
Example #10
0
    def setUpClass(cls):
        """Register the single primitive these tests depend on."""
        # To hide any logging or stdout output.
        with utils.silence():
            index.register_primitive(
                'd3m.primitives.evaluation.train_score_dataset_split.Common',
                TrainScoreDatasetSplitPrimitive)
Example #11
0
 def __init__(self, *, random_seed: int = 0, volumes_dir: str = None, scratch_dir: str = None, store_results=False,
              blocklist=()) -> None:
     """Store runner configuration and preload all non-blocklisted primitives."""
     self.random_seed = random_seed
     self.volumes_dir = volumes_dir
     self.scratch_dir = scratch_dir
     self.store_results = store_results
     self.fitted_pipelines = {}
     # Loading primitives and probing the environment is chatty; keep it quiet.
     with d3m_utils.silence():
         d3m_index.load_all(blocklist=blocklist)
         self.runtime_environment = pipeline_run_module.RuntimeEnvironment()
Example #12
0
    def _predict_semantic_type(self, input_column: container.DataFrame) -> str:
        """Predict a semantic type for a column from its (lower-cased) name."""
        column_name = input_column.metadata.query(('ALL_ELEMENTS', 0))['name']

        # Embed the name; silence the embedding model's progress output.
        with d3m_utils.silence():
            embedding = self._emb_model.encode([column_name.lower()],
                                               show_progress_bar=False)

        predictions = self._profiler_model.predict(embedding)
        # One input name in, exactly one prediction out.
        assert predictions.shape[0] == 1
        return predictions[0]
Example #13
0
def cumulative_prob_to_value(prob, hp):
    """Convert a cumulative probability from [0, 1] to a hyperparameter value.

    Parameters
    ----------
    prob : float
        A value in [0, 1] (e.g. a uniform random draw or a quantile).
    hp : hyperparams.Hyperparameter
        The hyper-parameter whose value space is being sampled.

    Returns
    -------
    A concrete value from the hyper-parameter's domain.

    Raises
    ------
    ValueError
        If the hyper-parameter type is not recognized.
    """
    # Hoisted out of the numeric branch where it was previously buried.
    import sys

    if isinstance(hp, hyperparams.Constant):
        return hp.get_default()
    elif isinstance(hp, hyperparams.UniformBool):
        return bool(prob >= 0.5)
    elif isinstance(
            hp,
        (hyperparams.Choice, hyperparams.Enumeration, hyperparams.Union)):
        if isinstance(hp, hyperparams.Choice):
            choices = list(hp.choices.keys())
        elif isinstance(hp, hyperparams.Union):
            choices = list(hp.configuration.keys())
        else:
            choices = hp.values
        ele_prob = 1 / len(choices)
        index = int(math.floor(prob / ele_prob))
        # Can happen when `prob` is very close to 1.
        if index == len(choices):
            index -= 1
        if isinstance(hp, hyperparams.Union):
            # A Union member is itself a hyper-parameter: sample inside it.
            key = choices[index]
            with d3m_utils.silence():
                return hp.configuration[key].sample()
        return choices[index]
    elif isinstance(
            hp,
        (hyperparams.UniformInt, hyperparams.Uniform, hyperparams.Bounded)):
        epsilon = sys.float_info.epsilon
        lower, upper = hp.lower, hp.upper
        if lower is None or upper is None:
            return hp.get_default()
        value = prob * (upper - lower) + lower
        if hp.structural_type == int:
            # NOTE(review): truncates and ignores bound inclusivity for ints —
            # preserved as-is; confirm this is intended.
            return int(value)
        # Nudge off an exclusive bound by one machine epsilon.
        if value == lower and not hp.lower_inclusive:
            return value + epsilon
        if value == upper and not hp.upper_inclusive:
            return value - epsilon
        return value
    elif isinstance(hp, hyperparams.LogUniform):
        lower, upper = hp.lower, hp.upper
        if lower is None or upper is None:
            return hp.get_default()
        return lower * math.pow(upper / lower, prob)
    elif isinstance(hp, (hyperparams.Normal, hyperparams.LogNormal)):
        # Inverse CDF (percent-point function) of the normal distribution.
        return norm.ppf(prob, loc=hp.mu, scale=hp.sigma)
    else:
        raise ValueError('Unrecognized HyperParameter type: {}'.format(hp))
Example #14
0
    def __init__(self, *, random_seed: int = 0, volumes_dir: str = None,
                 scratch_dir: str = None) -> None:
        """Initialize the backend state and capture the runtime environment."""
        super().__init__(random_seed=random_seed,
                         volumes_dir=volumes_dir,
                         scratch_dir=scratch_dir)
        self.fitted_pipelines = {}
        self.request_results = {}

        # RuntimeEnvironment logs while probing the system; keep it quiet.
        with d3m_utils.silence():
            self.runtime_environment = pipeline_run_module.RuntimeEnvironment()
Example #15
0
    def test_validate(self):
        """A registered primitive's metadata must pass pipeline-run validation."""
        # To hide any logging or stdout output.
        with utils.silence():
            index.register_primitive(
                'd3m.primitives.data_transformation.column_parser.Common',
                ColumnParserPrimitive)

        primitive = index.get_primitive_by_id(
            'd510cb7a-1782-4f51-b44c-58f0236e47c7')
        description = primitive.metadata.to_json_structure()
        pipeline_run.validate_primitive(description)
    def test_package_validation(self):
        """Metadata whose pip 'package' field is malformed must be rejected."""
        Inputs = container.List
        Outputs = container.List

        class Hyperparams(hyperparams.Hyperparams):
            pass

        with self.assertRaisesRegex(ValueError, 'Invalid package name'):
            # Silence any validation warnings.
            with utils.silence():

                class TestPrimitive(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyperparams]):
                    metadata = metadata_base.PrimitiveMetadata({
                        'id': '67568a80-dec2-4597-a10f-39afb13d3b9c',
                        'version': '0.1.0',
                        'name': "Test Primitive",
                        'source': {
                            'name': 'Test',
                        },
                        'installation': [{
                            'type': metadata_base.PrimitiveInstallationType.PIP,
                            # Presumably rejected because 'package' holds a URI,
                            # matching the 'Invalid package name' regex above.
                            'package': 'git+https://gitlab.com/datadrivendiscovery/tests-data.git',
                            'version': '0.1.0',
                        }],
                        'python_path': 'd3m.primitives.test.TestPrimitive',
                        'algorithm_types': [
                            metadata_base.PrimitiveAlgorithmType.NUMERICAL_METHOD,
                        ],
                        'primitive_family': metadata_base.PrimitiveFamily.OPERATOR,
                    })

                    def produce(self, *, inputs: Inputs, timeout: float = None,
                                iterations: int = None) -> base.CallResult[Outputs]:
                        pass
Example #17
0
    def SearchSolutions(self, request, context):
        """gRPC handler: start a pipeline search and return its search id."""
        user_agent = request.user_agent
        logger.info('method=SearchSolution, agent=%s', user_agent)

        # Checking version of protocol.
        if request.version != self.version:
            # Fixed log message: the original had a stray leading space and no
            # separator before the version placeholder.
            logger.info('method=SearchSolution, info=Different api version %s',
                        self.version)

        # Types allowed by client; fall back to everything we support.
        allowed_value_types = list(request.allowed_value_types)
        if not allowed_value_types:
            allowed_value_types = ALLOWED_VALUE_TYPES

        problem_description = utils.decode_problem_description(request.problem)

        # Parsing and storing Pipeline Template (store this to a file instead of passing it)
        with d3m_utils.silence():
            template = utils.decode_pipeline_description(
                pipeline_description=request.template,
                resolver=Resolver(
                    primitives_blocklist=PrimitivesList.BlockList))

        # Convert the request's time bound to seconds (presumably given in
        # minutes per the TA3-TA2 API — confirm).
        time_bound_search = request.time_bound_search * 60

        input_data = [load_data(utils.decode_value(x)) for x in request.inputs]

        search = SearchWrappers.remote(
            search_class=DataDrivenSearch,
            problem_description=problem_description,
            backend=self.backend,
            primitives_blocklist=PrimitivesList.BlockList,
            ranking_function=dummy_ranking_function,
            n_workers=self.n_workers)

        search_id = ray.get(search.get_search_id.remote())

        # Track the search and kick off the actual (remote) request.
        self.searches[search_id] = search
        request_id = self.searches[search_id].search_request.remote(
            time_left=time_bound_search, input_data=input_data)

        self.request_mapping[search_id] = request_id
        self.solutions[search_id] = []
        self.problem_descriptions[search_id] = problem_description
        return core_pb2.SearchSolutionsResponse(search_id=search_id)
Example #18
0
    def do_describe(self, solution_id):
        """Ask the TA2 core to describe a solution and return it as JSON.

        Returns
        -------
        dict or None
            The pipeline as a JSON structure, or None when the
            DescribeSolution call fails.
        """
        pipeline_description = None
        try:
            pipeline_description = self.core.DescribeSolution(
                pb_core.DescribeSolutionRequest(
                    solution_id=solution_id, )).pipeline
        except Exception:
            # Narrowed from a bare `except:`; the failure is logged below.
            logger.exception("Exception during describe %r", solution_id)

        # Previously a failed describe fell through and crashed while decoding
        # None; surface the failure as an explicit None instead.
        if pipeline_description is None:
            return None

        with silence():
            pipeline = decode_pipeline_description(
                pipeline_description, pipeline_module.NoResolver())

        return pipeline.to_json_structure()
Example #19
0
def save_pipeline_run(pipeline_run, path):
    """
    Serialize a pipeline run, or a list of runs, to a YAML file.

    Parameters
    ----------
    pipeline_run : PipelineRun or list of PipelineRun
        The run(s) to write out; a list shares one file, with runs after
        the first appended.
    path : str
        Directory where the YAML file is created.

    Returns
    -------
    pipeline_run_path : str
        Path of the written file, or None when `pipeline_run` is None.
    """
    if pipeline_run is None:
        return

    if isinstance(pipeline_run, list):
        # The shared file is named after the pipeline of the first run.
        pipeline_run_path = os.path.join(
            path, '{}.yml'.format(pipeline_run[0].pipeline['id']))
        with d3m_utils.silence():
            with open(pipeline_run_path, 'w') as file:
                for position, run in enumerate(pipeline_run):
                    run.to_yaml(file, appending=position > 0)
    else:
        pipeline_run_path = os.path.join(
            path, '{}.yml'.format(pipeline_run.pipeline['id']))
        with d3m_utils.silence():
            with open(pipeline_run_path, 'w') as file:
                pipeline_run.to_yaml(file)

    return pipeline_run_path
Example #20
0
    def evaluate_pipeline(
            self, data_handler, problem_description: Problem, pipeline: Pipeline,
            input_data_id: str, *, metrics: typing.Sequence[typing.Dict],
            data_preparation_pipeline: Pipeline = None, scoring_pipeline: Pipeline = None,
            data_preparation_params: typing.Dict[str, str] = None, scoring_params: typing.Dict[str, str] = None,
            timeout: float = None
    ) -> PipelineResult:
        """Evaluate a pipeline (fit and score across folds) and wrap the outcome."""
        with d3m_utils.silence():
            pipeline_result = PipelineResult(pipeline=pipeline)
        pipeline_result.status = "RUNNING"
        pipeline_result.method_called = "evaluate"

        # Pull the input data out of the (remote) data handler.
        input_data = ray.get(data_handler.get_data.remote(input_data_id))

        with d3m_utils.silence():
            scores, results = runtime_module.evaluate(
                pipeline=pipeline, inputs=input_data, data_pipeline=data_preparation_pipeline,
                scoring_pipeline=scoring_pipeline, problem_description=problem_description,
                data_params=data_preparation_params, metrics=metrics, context=Context.TESTING,
                scoring_params=scoring_params, hyperparams=None, random_seed=self.random_seed,
                data_random_seed=self.random_seed, scoring_random_seed=self.random_seed,
                volumes_dir=self.volumes_dir, scratch_dir=self.scratch_dir, runtime_environment=self.runtime_environment
            )

        if not results.has_error():
            pipeline_result.status = "COMPLETED"
            pipeline_result.scores = runtime_module.combine_folds(scores)
        else:
            pipeline_result.status = "ERRORED"
            # Collect one error entry per fold result.
            pipeline_result.error = [fold_result.error for fold_result in results]

        if self.store_results:
            pipeline_result.pipeline_run = save_pipeline_run(results.pipeline_runs, self.scratch_dir)
        return pipeline_result
Example #21
0
def fetch_from_file(problem_description, path):
    """Load the candidate pipelines matching this problem's task type from a JSON file."""
    # ToDo should use yield
    task_type, task_subtype, data_types, semi = _get_task_description(
        problem_description)

    with open(path) as file:
        candidates_by_task = json.load(file)

    pipelines = []
    with d3m_utils.silence():
        for file_task_type, pipeline_infos in candidates_by_task.items():
            if file_task_type != task_type:
                continue
            for pipeline_info in pipeline_infos:
                pipelines.append(pipeline_utils.load_pipeline(pipeline_info))
    return pipelines
Example #22
0
    def fit_pipeline(
            self, data_handler, problem_description: Problem, pipeline:  Pipeline,
            input_data_id: str, *, timeout: float = None, expose_outputs: bool = False
    ) -> PipelineResult:
        """Fit a pipeline on the referenced input data and wrap the outcome.

        On success the fitted runtime is cached under a fresh id recorded in
        the returned PipelineResult.
        """
        pipeline_result = PipelineResult(pipeline=pipeline)
        pipeline_result.status = "RUNNING"
        pipeline_result.method_called = "fit"

        # Fetch the input data from the (remote) data handler.
        input_data = ray.get(data_handler.get_data.remote(input_data_id))

        # A single input and a single output marks a standard pipeline.
        is_standard_pipeline = len(input_data) == 1 and len(pipeline.outputs) == 1

        with d3m_utils.silence():
            runtime, output, result = runtime_module.fit(
                pipeline=pipeline, inputs=input_data, problem_description=problem_description, context=Context.TESTING,
                hyperparams=None, random_seed=self.random_seed, volumes_dir=self.volumes_dir,
                scratch_dir=self.scratch_dir,
                runtime_environment=self.runtime_environment, is_standard_pipeline=is_standard_pipeline,
                expose_produced_outputs=expose_outputs
            )

        if result.has_error():
            pipeline_result.status = "ERRORED"
            pipeline_result.error = result.error
        else:
            pipeline_result.status = "COMPLETED"

            if self.store_results:
                # Persist the exposed values and keep only their references.
                pipeline_result.exposed_outputs = save_exposed_values(result.values, pipeline.id, self.scratch_dir)
                pipeline_result.output = save_exposed_values(output, pipeline.id, self.scratch_dir)
            else:
                pipeline_result.exposed_outputs = result.values
                pipeline_result.output = output

            # Keep the fitted runtime around for later produce calls.
            fitted_pipeline_id = str(uuid.uuid4())
            pipeline_result.fitted_pipeline_id = fitted_pipeline_id
            self.fitted_pipelines[fitted_pipeline_id] = runtime

        if self.store_results:
            pipeline_result.pipeline_run = save_pipeline_run(result.pipeline_run, self.scratch_dir)

        return pipeline_result
    def test_multi_produce_missing_argument(self):
        """A produce method with arguments not covered by `multi_produce` must fail validation."""
        with self.assertRaisesRegex(exceptions.InvalidPrimitiveCodeError, '\'multi_produce\' method arguments have to be an union of all arguments of all produce methods, but it does not accept all expected arguments'):
            # Silence any validation warnings.
            with utils.silence():
                class TestPrimitive(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyperparams]):
                    metadata = metadata_base.PrimitiveMetadata({
                        'id': '67568a80-dec2-4597-a10f-39afb13d3b9c',
                        'version': '0.1.0',
                        'name': "Test Primitive",
                        'python_path': 'd3m.primitives.test.TestPrimitive',
                        'algorithm_types': [
                            metadata_base.PrimitiveAlgorithmType.NUMERICAL_METHOD,
                        ],
                        'primitive_family': metadata_base.PrimitiveFamily.OPERATOR,
                    })

                    # `second_inputs` is not accepted by the inherited `multi_produce`.
                    def produce(self, *, inputs: Inputs, second_inputs: Inputs, timeout: float = None, iterations: int = None) -> base.CallResult[Outputs]:
                        pass
    def test_produce_using_produce_methods(self):
        """A produce method declaring a `produce_methods` argument must fail validation."""
        with self.assertRaisesRegex(exceptions.InvalidPrimitiveCodeError, 'Produce method cannot use \'produce_methods\' argument'):
            # Silence any validation warnings.
            with utils.silence():
                class TestPrimitive(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyperparams]):
                    metadata = metadata_base.PrimitiveMetadata({
                        'id': '67568a80-dec2-4597-a10f-39afb13d3b9c',
                        'version': '0.1.0',
                        'name': "Test Primitive",
                        'python_path': 'd3m.primitives.test.TestPrimitive',
                        'algorithm_types': [
                            metadata_base.PrimitiveAlgorithmType.NUMERICAL_METHOD,
                        ],
                        'primitive_family': metadata_base.PrimitiveFamily.OPERATOR,
                    })

                    # `produce_methods` is reserved for multi-produce dispatch.
                    def produce(self, *, inputs: Inputs, produce_methods: typing.Sequence[str], timeout: float = None, iterations: int = None) -> base.CallResult[Outputs]:
                        pass
Example #25
0
    def DescribeSolution(self, request, context):
        """gRPC handler: return the encoded pipeline for a solution id."""
        solution_id = request.solution_id
        logger.info('method=DescribeSolution, solution_id=%s', solution_id)

        pipeline, _, _ = self.get_solution_problem(solution_id)
        if pipeline is None:
            # Unknown solution id: reply with an empty response.
            logger.info(
                'method=DescribeSolution, solution_id=%s, error=Solution_id not found',
                solution_id)
            return core_pb2.DescribeSolutionResponse()

        with d3m_utils.silence():
            encoded_pipeline = utils.encode_pipeline_description(
                pipeline, ALLOWED_VALUE_TYPES, Path.TEMP_STORAGE_ROOT)

        return core_pb2.DescribeSolutionResponse(pipeline=encoded_pipeline)
    def test_neural_network_mixin(self):
        """A primitive combining NeuralNetworkModuleMixin with an unsupervised base must validate."""
        # Minimal stand-in module hierarchy for the mixin's type parameter.
        class MyNeuralNetworkModuleBase:
            pass

        class Params(params.Params):
            pass

        class MyNeuralNetworkModule(MyNeuralNetworkModuleBase):
            pass

        # Silence any validation warnings.
        with utils.silence():
            class TestPrimitive(
                base.NeuralNetworkModuleMixin[Inputs, Outputs, Params, Hyperparams, MyNeuralNetworkModuleBase],
                unsupervised_learning.UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, Params, Hyperparams],
            ):
                metadata = metadata_base.PrimitiveMetadata({
                    'id': '4164deb6-2418-4c96-9959-3d475dcf9584',
                    'version': '0.1.0',
                    'name': "Test neural network module",
                    'python_path': 'd3m.primitives.layer.super.TestPrimitive',
                    'algorithm_types': [
                        metadata_base.PrimitiveAlgorithmType.CONVOLUTIONAL_NEURAL_NETWORK_LAYER,
                    ],
                    'primitive_family': metadata_base.PrimitiveFamily.LAYER,
                })

                # The standard primitive methods are stubs; only class-level
                # validation (at definition time) matters for this test.
                def produce(self, *, inputs: Inputs, timeout: float = None, iterations: int = None) -> base.CallResult[Outputs]:
                    raise exceptions.NotSupportedError

                def set_training_data(self, *, inputs: Inputs) -> None:
                    raise exceptions.NotSupportedError

                def fit(self, *, timeout: float = None, iterations: int = None) -> base.CallResult[None]:
                    raise exceptions.NotSupportedError

                def get_params(self) -> Params:
                    return Params()

                def set_params(self, *, params: Params) -> None:
                    pass

                def get_module(self, *, input_module: MyNeuralNetworkModuleBase) -> MyNeuralNetworkModuleBase:
                    return MyNeuralNetworkModule()
Example #27
0
    def get_solution_problem(self, solution_id):
        """Locate the search that produced a solution and load its pipeline.

        Returns (pipeline, problem_description, search_id), or
        (None, None, None) when the solution id is unknown.
        """
        describe_search_id = next(
            (search_id
             for search_id, solution_ids in self.solutions.items()
             if solution_id in solution_ids),
            None)

        if describe_search_id is None:
            return None, None, None

        solution_path = os.path.join(
            SearchPath(describe_search_id).pipelines_scored,
            '{}.json'.format(solution_id))

        with d3m_utils.silence():
            pipeline = load_pipeline(solution_path)

        problem_description = self.problem_descriptions[describe_search_id]
        return pipeline, problem_description, describe_search_id
Example #28
0
    def test_no_git_repo(self):
        """Without a git repo, engine versions fall back to the d3m package version."""
        git_path_moved = False
        if os.path.exists(self.original_git_path):
            # Temporarily hide the git repository.
            os.rename(self.original_git_path, self.moved_git_path)
            git_path_moved = True
        try:
            with utils.silence():
                env = RuntimeEnvironment()

            for key in ('reference_engine_version', 'engine_version'):
                self.assertEqual(
                    env[key], d3m.__version__,
                    'reference_engine_version incorrectly extracted from d3m repo')
        finally:
            # Always restore the repository, even when an assertion fails.
            if git_path_moved:
                os.rename(self.moved_git_path, self.original_git_path)
    def test_hyperparams_to_tune(self):
        """Metadata naming a nonexistent hyper-parameter in `hyperparams_to_tune` must fail validation."""
        with self.assertRaisesRegex(exceptions.InvalidMetadataError, 'Hyper-parameter in \'hyperparams_to_tune\' metadata does not exist'):
            # Silence any validation warnings.
            with utils.silence():
                class TestPrimitive(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyperparams]):
                    metadata = metadata_base.PrimitiveMetadata({
                        'id': '67568a80-dec2-4597-a10f-39afb13d3b9c',
                        'version': '0.1.0',
                        'name': "Test Primitive",
                        'python_path': 'd3m.primitives.test.TestPrimitive',
                        'algorithm_types': [
                            metadata_base.PrimitiveAlgorithmType.NUMERICAL_METHOD,
                        ],
                        'primitive_family': metadata_base.PrimitiveFamily.OPERATOR,
                        # 'foobar' is not declared in the Hyperparams class.
                        'hyperparams_to_tune': [
                            'foobar',
                        ]
                    })

                    def produce(self, *, inputs: Inputs, timeout: float = None, iterations: int = None) -> base.CallResult[Outputs]:
                        pass
Example #30
0
    def produce(self,
                fitted_pipeline: runtime_module.Runtime,
                input_data: typing.Sequence[container.Dataset],
                expose_outputs: bool = False) -> PipelineResult:
        """Run produce on a fitted runtime and wrap the outcome."""
        pipeline_result = PipelineResult(fitted_pipeline_id='')

        with d3m_utils.silence():
            output, result = runtime_module.produce(
                fitted_pipeline=fitted_pipeline,
                test_inputs=input_data,
                expose_produced_outputs=expose_outputs)

        if result.has_error():
            pipeline_result.status = "ERRORED"
            pipeline_result.error = result.error
            return pipeline_result

        pipeline_result.status = "COMPLETED"
        pipeline_result.exposed_outputs = result.values
        pipeline_result.output = output
        return pipeline_result