def test_to_config():
    mds1 = ZenMLMetadataStore()

    # to_config/from_config are disallowed for the base class because it
    # is not registered in the factory; the check raises an AssertionError
    with pytest.raises(AssertionError):
        mds1.to_config()
Example #2
def test_get_pipeline_context(repo):
    mds1 = ZenMLMetadataStore()

    random_pipeline = random.choice(repo.get_pipelines())

    with pytest.raises(expected_query_error):
        _ = mds1.get_pipeline_context(random_pipeline)
def test_get_artifacts_by_execution():
    mds1 = ZenMLMetadataStore()

    # a fabricated execution id cannot correspond to any recorded execution
    fake_id = "abcdefg"
    with pytest.raises(ValueError):
        _ = mds1.get_artifacts_by_execution(fake_id)
def test_get_components_status(repo):
    mds1 = ZenMLMetadataStore()

    random_pipeline = random.choice(repo.get_pipelines())

    with pytest.raises(expected_query_error):
        _ = mds1.get_components_status(random_pipeline)
Example #5
def test_to_from_config(equal_md_stores):
    mds1 = ZenMLMetadataStore()

    mds2 = ZenMLMetadataStore.from_config(mds1.to_config())

    # TODO: This fails because from_config throws (base store is
    #  not in the factory)
    assert equal_md_stores(mds1, mds2, loaded=True)
def test_get_component_execution(repo):
    mds1 = ZenMLMetadataStore()

    random_pipeline = random.choice(repo.get_pipelines())

    component_name = GDPComponent.SplitGen.name

    with pytest.raises(expected_query_error):
        _ = mds1.get_component_execution(random_pipeline, component_name)
def test_get_pipeline_executions(repo):
    mds1 = ZenMLMetadataStore()

    random_pipeline = random.choice(repo.get_pipelines())

    # querying a fresh, unconfigured metadata store for this pipeline's
    # executions should raise a query error rather than return an empty list
    with pytest.raises(expected_query_error):
        _ = mds1.get_pipeline_executions(random_pipeline)
def test_get_artifacts_by_component(repo):
    mds1 = ZenMLMetadataStore()

    random_pipeline = random.choice(repo.get_pipelines())

    # pick a component guaranteed to be present
    component_name = GDPComponent.SplitGen.name

    with pytest.raises(expected_query_error):
        _ = mds1.get_artifacts_by_component(random_pipeline, component_name)
def test_get_pipeline_status(repo):
    random_pipeline = random.choice(repo.get_pipelines())

    mds1 = ZenMLMetadataStore()

    # TODO: This returns a NotStarted enum, which may be misleading as the
    #  associated store does not even exist
    # with pytest.raises(expected_query_error):
    assert mds1.get_pipeline_status(random_pipeline) == \
           PipelineStatusTypes.NotStarted.name
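The tests above rely on pytest fixtures (repo, expected_query_error, equal_md_stores) defined elsewhere in the test suite. A minimal conftest.py sketch of what such fixtures could look like follows; the import path, the exception type and the equality check are assumptions, not the actual ZenML test fixtures.

import pytest

# assumption: Repository lives under zenml.core.repo.repo in this version
from zenml.core.repo.repo import Repository


@pytest.fixture
def repo():
    # assumption: the tests run against the repository singleton
    return Repository.get_instance()


@pytest.fixture
def expected_query_error():
    # assumption: querying an unconfigured base metadata store surfaces as a
    # ValueError (the last test in this listing expects ValueError directly)
    return ValueError


@pytest.fixture
def equal_md_stores():
    # assumption: two stores count as equal if they share type and attributes
    def _equal(store1, store2, loaded=False):
        return type(store1) is type(store2) and store1.__dict__ == store2.__dict__

    return _equal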
Example #10
    def from_config(cls, config: Dict):
        """
        Convert from pipeline config to ZenML Pipeline object.

        All steps are also populated, and their configuration is set from the
        parameters in the config file.

        Args:
            config: a ZenML config in dict-form (probably loaded from YAML).
        """
        # start with artifact store
        artifact_store = ArtifactStore(config[keys.GlobalKeys.ARTIFACT_STORE])

        # metadata store
        metadata_store = ZenMLMetadataStore.from_config(
            config=config[keys.GlobalKeys.METADATA_STORE]
        )

        # orchestration backend
        backend = OrchestratorBaseBackend.from_config(
            config[keys.GlobalKeys.BACKEND])

        # pipeline configuration
        p_config = config[keys.GlobalKeys.PIPELINE]
        pipeline_name = p_config[keys.PipelineKeys.NAME]
        pipeline_source = p_config[keys.PipelineKeys.SOURCE]

        # populate steps
        steps_dict: Dict = {}
        for step_key, step_config in p_config[keys.PipelineKeys.STEPS].items():
            steps_dict[step_key] = BaseStep.from_config(step_config)

        # datasource
        datasource = BaseDatasource.from_config(
            config[keys.GlobalKeys.PIPELINE])

        # enable cache
        enable_cache = p_config[keys.PipelineKeys.ENABLE_CACHE]

        class_ = source_utils.load_source_path_class(pipeline_source)

        obj = class_(
            name=cls.get_name_from_pipeline_name(pipeline_name),
            pipeline_name=pipeline_name,
            enable_cache=enable_cache,
            steps_dict=steps_dict,
            backend=backend,
            artifact_store=artifact_store,
            metadata_store=metadata_store,
            datasource=datasource)
        obj._immutable = True
        logger.debug(f'Pipeline {pipeline_name} loaded and is immutable.')
        return obj
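The dict shape this from_config expects can be read off the keys accessed above. The sketch below only illustrates that shape with placeholder values; it is not an official ZenML config.

config = {
    keys.GlobalKeys.ARTIFACT_STORE: '/path/to/artifact_store',  # placeholder path
    keys.GlobalKeys.METADATA_STORE: {},  # ZenMLMetadataStore config (type + args)
    keys.GlobalKeys.BACKEND: {},         # OrchestratorBaseBackend config
    keys.GlobalKeys.PIPELINE: {
        keys.PipelineKeys.NAME: 'pipeline_1',               # placeholder
        keys.PipelineKeys.SOURCE: 'my_module.MyPipeline',   # placeholder source path
        keys.PipelineKeys.ENABLE_CACHE: True,
        keys.PipelineKeys.STEPS: {
            # step_key -> BaseStep config, consumed by the loop above
        },
        # BaseDatasource.from_config also reads from this PIPELINE section
    },
}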
Example #11
    def load_config(self, config_path: Dict):
        """
        Sets metadata and artifact_store variables

        Args:
            config_path (str): Path to a .zenml config.
        """
        assert METADATA_KEY in config_path
        assert ARTIFACT_STORE_KEY in config_path
        assert PIPELINES_DIR_KEY in config_path

        self.artifact_store = ArtifactStore(config_path[ARTIFACT_STORE_KEY])
        self.metadata_store = ZenMLMetadataStore.from_config(
            config=config_path[METADATA_KEY])
        self.pipelines_dir = config_path[PIPELINES_DIR_KEY]
Example #12
    def from_config(self, config_dict: Dict):
        """
        Sets metadata and artifact_store variables

        Args:
            config_dict (dict): .zenml config object in dict format.
        """
        assert METADATA_KEY in config_dict
        assert ARTIFACT_STORE_KEY in config_dict
        assert PIPELINES_DIR_KEY in config_dict

        self.artifact_store = ArtifactStore(config_dict[ARTIFACT_STORE_KEY])
        self.metadata_store = ZenMLMetadataStore.from_config(
            config=config_dict[METADATA_KEY])
        self.pipelines_dir = config_dict[PIPELINES_DIR_KEY]
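Both load_config and from_config above require the same three top-level keys. A minimal sketch of such a dict with placeholder values (the key constants come from the surrounding module):

config_dict = {
    METADATA_KEY: {},  # ZenMLMetadataStore config (type + args)
    ARTIFACT_STORE_KEY: '/path/to/artifact_store',  # placeholder path
    PIPELINES_DIR_KEY: '/path/to/pipelines',        # placeholder path
}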
Example #13
    def from_config(cls, config: Dict):
        """
        Convert from pipeline config to ZenML Pipeline object.

        All steps are also populated, and their configuration is set from the
        parameters in the config file.

        Args:
            config: a ZenML config in dict-form (probably loaded from YAML).
        """
        # populate steps
        steps_dict: Dict = {}
        for step_key, step_config in config[keys.GlobalKeys.STEPS].items():
            steps_dict[step_key] = BaseStep.from_config(step_config)

        env = config[keys.GlobalKeys.ENV]
        pipeline_name = env[keys.EnvironmentKeys.EXPERIMENT_NAME]
        name = BasePipeline.get_name_from_pipeline_name(
            pipeline_name=pipeline_name)

        backends_dict: Dict = {}
        for backend_key, backend_config in env[
                keys.EnvironmentKeys.BACKENDS].items():
            backends_dict[backend_key] = BaseBackend.from_config(
                backend_key, backend_config)

        artifact_store = ArtifactStore(
            env[keys.EnvironmentKeys.ARTIFACT_STORE])
        metadata_store = ZenMLMetadataStore.from_config(
            config=env[METADATA_KEY])

        datasource = BaseDatasource.from_config(config)

        from zenml.core.pipelines.pipeline_factory import pipeline_factory
        pipeline_type = BasePipeline.get_type_from_pipeline_name(pipeline_name)
        class_ = pipeline_factory.get_pipeline_by_type(pipeline_type)

        # TODO: [MEDIUM] Perhaps move some of the logic in the init block here
        #  especially regarding inferring immutability.

        return class_(name=name,
                      pipeline_name=pipeline_name,
                      enable_cache=env[keys.EnvironmentKeys.ENABLE_CACHE],
                      steps_dict=steps_dict,
                      backends_dict=backends_dict,
                      artifact_store=artifact_store,
                      metadata_store=metadata_store,
                      datasource=datasource)
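This older from_config variant reads a different layout: steps sit at the top level and the rest under an ENV section. A hedged sketch of that shape, inferred only from the keys accessed above:

config = {
    keys.GlobalKeys.STEPS: {
        # step_key -> BaseStep config
    },
    keys.GlobalKeys.ENV: {
        keys.EnvironmentKeys.EXPERIMENT_NAME: 'pipeline_1',  # placeholder
        keys.EnvironmentKeys.ENABLE_CACHE: True,
        keys.EnvironmentKeys.BACKENDS: {
            # backend_key -> BaseBackend config
        },
        keys.EnvironmentKeys.ARTIFACT_STORE: '/path/to/artifact_store',  # placeholder
        METADATA_KEY: {},  # ZenMLMetadataStore config
    },
    # BaseDatasource.from_config receives the full config dict
}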
Example #14
def set_metadata_store(store_type, args):
    """Set metadata store for local config."""

    try:
        parsed_args = parse_unknown_options(args)
    except AssertionError as e:
        click.echo(str(e))
        return

    # TODO: [LOW] Hard-coded
    config = {'type': store_type, 'args': parsed_args}
    from zenml.core.metadata.metadata_wrapper import ZenMLMetadataStore

    store = ZenMLMetadataStore.from_config(config)
    repo: Repository = Repository.get_instance()
    repo.zenml_config.set_metadata_store(store)

    click.echo(f'Metadata store set to: {store.to_config()}')
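For illustration, the dict handed to ZenMLMetadataStore.from_config here has the shape below; the store type and args are placeholders, and the type must resolve to a concrete store registered in the factory, otherwise from_config raises (see test_from_config that follows).

config = {
    'type': 'mysql',                                     # placeholder store type
    'args': {'host': '127.0.0.1', 'database': 'zenml'},  # placeholder parsed CLI args
}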
def test_from_config():
    config = {MLMetadataKeys.TYPE: None, MLMetadataKeys.ARGS: {}}

    # throws because base MDStore is not in the factory
    with pytest.raises(AssertionError):
        _ = ZenMLMetadataStore.from_config(config)
Example #16
def test_metadata_init():
    mds1 = ZenMLMetadataStore()

    with pytest.raises(expected_query_error):
        _ = mds1.store
def test_metadata_init():
    mds1 = ZenMLMetadataStore()

    with pytest.raises(ValueError):
        _ = mds1.store