Example 1
    def __init__(
        self,
        name=None,
        resource_defs=None,
        logger_defs=None,
        system_storage_defs=None,
        description=None,
    ):
        from .system_storage import SystemStorageDefinition, mem_system_storage, fs_system_storage

        self.name = check.opt_str_param(name, 'name', DEFAULT_MODE_NAME)
        self.resource_defs = check.opt_dict_param(
            resource_defs,
            'resource_defs',
            key_type=str,
            value_type=ResourceDefinition)
        self.loggers = (check.opt_dict_param(logger_defs,
                                             'logger_defs',
                                             key_type=str,
                                             value_type=LoggerDefinition)
                        or default_loggers())
        self.system_storage_defs = check.list_param(
            system_storage_defs if system_storage_defs else
            [mem_system_storage, fs_system_storage],
            'system_storage_def',
            of_type=SystemStorageDefinition,
        )
        self.description = check.opt_str_param(description, 'description')
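A minimal usage sketch for the constructor above, assuming the @resource and @logger decorators are available alongside this ModeDefinition (the resource and logger names are illustrative):

import logging

from dagster import ModeDefinition, logger, resource


@resource
def warehouse_resource(_init_context):
    # hypothetical resource standing in for whatever the mode should provide
    return {"rows": []}


@logger
def console_logger(_init_context):
    return logging.getLogger("console")


dev_mode = ModeDefinition(
    name="dev",
    resource_defs={"warehouse": warehouse_resource},
    logger_defs={"console": console_logger},
)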
Example 2
def create_creation_data(pipeline_def):
    return EnvironmentClassCreationData(
        pipeline_def.name,
        pipeline_def.solids,
        pipeline_def.dependency_structure,
        mode_definition=None,
        logger_defs=default_loggers(),
    )
Example 3
    def __new__(
        cls,
        name=None,
        resource_defs=None,
        logger_defs=None,
        system_storage_defs=None,
        executor_defs=None,
        description=None,
        intermediate_storage_defs=None,
    ):
        from dagster.core.storage.system_storage import (
            default_system_storage_defs,
            default_intermediate_storage_defs,
        )

        from .system_storage import SystemStorageDefinition
        from .intermediate_storage import IntermediateStorageDefinition

        if system_storage_defs is not None and intermediate_storage_defs is None:
            warnings.warn(
                "system_storage_defs are deprecated and will be removed in 0.10.0 "
                "and should be replaced with "
                "intermediate_storage_defs for intermediates and resource_defs for files"
            )

        return super(ModeDefinition, cls).__new__(
            cls,
            name=check_for_invalid_name_and_warn(name)
            if name else DEFAULT_MODE_NAME,
            resource_defs=check.opt_dict_param(resource_defs,
                                               "resource_defs",
                                               key_type=str,
                                               value_type=ResourceDefinition),
            loggers=(check.opt_dict_param(logger_defs,
                                          "logger_defs",
                                          key_type=str,
                                          value_type=LoggerDefinition)
                     or default_loggers()),
            system_storage_defs=check.list_param(
                system_storage_defs
                if system_storage_defs else default_system_storage_defs,
                "system_storage_defs",
                of_type=SystemStorageDefinition,
            ),
            intermediate_storage_defs=check.list_param(
                intermediate_storage_defs if intermediate_storage_defs else
                default_intermediate_storage_defs,
                "intermediate_storage_defs",
                of_type=IntermediateStorageDefinition,
            ),
            executor_defs=check.list_param(
                executor_defs if executor_defs else default_executors,
                "executor_defs",
                of_type=ExecutorDefinition,
            ),
            description=check.opt_str_param(description, "description"),
        )
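A hedged sketch of how the deprecation branch above surfaces to a caller, assuming this 0.9-era ModeDefinition and the default_system_storage_defs import it already uses:

import warnings

from dagster import ModeDefinition
from dagster.core.storage.system_storage import default_system_storage_defs

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # system_storage_defs without intermediate_storage_defs hits the warning branch
    ModeDefinition(system_storage_defs=default_system_storage_defs)

assert any("system_storage_defs are deprecated" in str(w.message) for w in caught)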
Example 4
    def __new__(
        cls,
        name=None,
        resource_defs=None,
        logger_defs=None,
        system_storage_defs=None,
        executor_defs=None,
        description=None,
        intermediate_storage_defs=None,
    ):
        from dagster.core.storage.system_storage import (
            default_system_storage_defs,
            default_intermediate_storage_defs,
        )

        from .system_storage import SystemStorageDefinition
        from .intermediate_storage import IntermediateStorageDefinition

        if system_storage_defs is not None and intermediate_storage_defs is None:
            warnings.warn(
                'system_storage_defs are deprecated and will be removed in 0.10.0 '
                'and should be replaced with '
                'intermediate_storage_defs for intermediates and resource_defs for files'
            )

        return super(ModeDefinition, cls).__new__(
            cls,
            name=check.opt_str_param(name, 'name', DEFAULT_MODE_NAME),
            resource_defs=check.opt_dict_param(resource_defs,
                                               'resource_defs',
                                               key_type=str,
                                               value_type=ResourceDefinition),
            loggers=(check.opt_dict_param(logger_defs,
                                          'logger_defs',
                                          key_type=str,
                                          value_type=LoggerDefinition)
                     or default_loggers()),
            system_storage_defs=check.list_param(
                system_storage_defs
                if system_storage_defs else default_system_storage_defs,
                'system_storage_defs',
                of_type=SystemStorageDefinition,
            ),
            intermediate_storage_defs=check.list_param(
                intermediate_storage_defs if intermediate_storage_defs else
                default_intermediate_storage_defs,
                'intermediate_storage_defs',
                of_type=IntermediateStorageDefinition,
            ),
            executor_defs=check.list_param(
                executor_defs if executor_defs else default_executors,
                'executor_defs',
                of_type=ExecutorDefinition,
            ),
            description=check.opt_str_param(description, 'description'),
        )
Example 5
def create_creation_data(pipeline_def):
    return EnvironmentClassCreationData(
        pipeline_def.name,
        pipeline_def.solids,
        pipeline_def.dependency_structure,
        pipeline_def.mode_definition,
        logger_defs=default_loggers(),
        ignored_solids=[],
        required_resources=set(),
    )
Example 6
def create_creation_data(pipeline_def):
    return RunConfigSchemaCreationData(
        pipeline_def.name,
        pipeline_def.solids,
        pipeline_def.dependency_structure,
        pipeline_def.mode_definition,
        logger_defs=default_loggers(),
        ignored_solids=[],
        required_resources=set(),
        is_using_graph_job_op_apis=pipeline_def.is_job,
    )
Example 7
    def __new__(
        cls,
        name: Optional[str] = None,
        resource_defs: Optional[Dict[str, ResourceDefinition]] = None,
        logger_defs: Optional[Dict[str, LoggerDefinition]] = None,
        executor_defs: Optional[List[ExecutorDefinition]] = None,
        description: Optional[str] = None,
        _config_mapping: Optional[ConfigMapping] = None,
        _partitioned_config: Optional["PartitionedConfig"] = None,
    ):

        from .partition import PartitionedConfig

        resource_defs = check.opt_dict_param(resource_defs,
                                             "resource_defs",
                                             key_type=str,
                                             value_type=ResourceDefinition)

        for key in resource_defs:
            if not key.isidentifier():
                check.failed(
                    f"Resource key '{key}' must be a valid Python identifier.")

        if resource_defs and "io_manager" in resource_defs:
            resource_defs_with_defaults = resource_defs
        else:
            from dagster.core.storage.mem_io_manager import mem_io_manager

            resource_defs_with_defaults = merge_dicts(
                {"io_manager": mem_io_manager}, resource_defs or {})

        return super(ModeDefinition, cls).__new__(
            cls,
            name=check_valid_name(name) if name else DEFAULT_MODE_NAME,
            resource_defs=resource_defs_with_defaults,
            loggers=(check.opt_dict_param(logger_defs,
                                          "logger_defs",
                                          key_type=str,
                                          value_type=LoggerDefinition)
                     or default_loggers()),
            executor_defs=check.list_param(
                executor_defs if executor_defs else default_executors,
                "executor_defs",
                of_type=ExecutorDefinition,
            ),
            description=check.opt_str_param(description, "description"),
            config_mapping=check.opt_inst_param(_config_mapping,
                                                "_config_mapping",
                                                ConfigMapping),
            partitioned_config=check.opt_inst_param(_partitioned_config,
                                                    "_partitioned_config",
                                                    PartitionedConfig),
        )
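The io_manager defaulting above can be exercised directly; a small sketch, assuming this version of ModeDefinition and the mem_io_manager import it already references:

from dagster import ModeDefinition
from dagster.core.storage.mem_io_manager import mem_io_manager

# no "io_manager" resource supplied, so the in-memory manager is merged in as the default
default_mode = ModeDefinition(name="unit_test")
assert default_mode.resource_defs["io_manager"] is mem_io_manager

# an explicitly supplied "io_manager" resource is left untouched
custom_mode = ModeDefinition(resource_defs={"io_manager": mem_io_manager})
assert custom_mode.resource_defs["io_manager"] is mem_io_manager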
Example 8
def create_log_manager(
        context_creation_data: ContextCreationData) -> DagsterLogManager:
    check.inst_param(context_creation_data, "context_creation_data",
                     ContextCreationData)

    pipeline_def, mode_def, environment_config, pipeline_run = (
        context_creation_data.pipeline_def,
        context_creation_data.mode_def,
        context_creation_data.environment_config,
        context_creation_data.pipeline_run,
    )

    # The following logic is tightly coupled to the processing of logger config in
    # python_modules/dagster/dagster/core/system_config/objects.py#config_map_loggers
    # Changes here should be checked against that function, which applies config mapping
    # via ConfigurableDefinition (@configured) to incoming logger configs. See docstring for more details.

    loggers = []
    for logger_key, logger_def in mode_def.loggers.items() or default_loggers().items():
        if logger_key in environment_config.loggers:
            loggers.append(
                logger_def.logger_fn(
                    InitLoggerContext(
                        environment_config.loggers.get(logger_key,
                                                       {}).get("config"),
                        logger_def,
                        pipeline_def=pipeline_def,
                        run_id=pipeline_run.run_id,
                    )))

    if not loggers:
        for (logger_def, logger_config) in default_system_loggers():
            loggers.append(
                logger_def.logger_fn(
                    InitLoggerContext(
                        logger_config,
                        logger_def,
                        pipeline_def=pipeline_def,
                        run_id=pipeline_run.run_id,
                    )))

    # should this be first in loggers list?
    loggers.append(context_creation_data.instance.get_logger())

    return DagsterLogManager(
        run_id=pipeline_run.run_id,
        logging_tags=get_logging_tags(pipeline_run),
        loggers=loggers,
    )
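For context, a mode-level logger that the loop above would initialize through InitLoggerContext might look like this sketch (assuming dagster's @logger decorator; the config schema and names are illustrative):

import logging

from dagster import Field, logger


@logger({"log_level": Field(str, is_required=False, default_value="INFO")})
def json_console_logger(init_context):
    # init_context is the InitLoggerContext built in create_log_manager above
    py_logger = logging.getLogger("json_console")
    py_logger.setLevel(init_context.logger_config["log_level"])
    py_logger.addHandler(logging.StreamHandler())
    return py_logger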
Example 9
def create_log_manager(context_creation_data):
    check.inst_param(context_creation_data, 'context_creation_data',
                     ContextCreationData)

    pipeline_def, mode_def, environment_config, run_config = (
        context_creation_data.pipeline_def,
        context_creation_data.mode_def,
        context_creation_data.environment_config,
        context_creation_data.run_config,
    )

    loggers = []
    for logger_key, logger_def in mode_def.loggers.items() or default_loggers().items():
        if logger_key in environment_config.loggers:
            loggers.append(
                logger_def.logger_fn(
                    InitLoggerContext(
                        environment_config.loggers.get(logger_key,
                                                       {}).get('config'),
                        pipeline_def,
                        logger_def,
                        run_config.run_id,
                    )))

    if run_config.loggers:
        for logger in run_config.loggers:
            loggers.append(logger)

    if not loggers:
        for (logger_def, logger_config) in default_system_loggers():
            loggers.append(
                logger_def.logger_fn(
                    InitLoggerContext(logger_config, pipeline_def, logger_def,
                                      run_config.run_id)))

    if run_config.event_callback:
        # note: logger_def here is reused from the last iteration of the loops above
        init_logger_context = InitLoggerContext({}, pipeline_def, logger_def,
                                                run_config.run_id)
        loggers.append(
            construct_event_logger(
                run_config.event_callback).logger_fn(init_logger_context))

    return DagsterLogManager(
        run_id=run_config.run_id,
        logging_tags=get_logging_tags(context_creation_data.run_config,
                                      context_creation_data.pipeline_def),
        loggers=loggers,
    )
Example 10
    def __init__(self,
                 name=DEFAULT_MODE_NAME,
                 resources=None,
                 loggers=None,
                 description=None):
        self.name = check.str_param(name, 'name')
        self.resource_defs = check.opt_dict_param(
            resources,
            'resources',
            key_type=str,
            value_type=ResourceDefinition)
        self.loggers = (check.opt_dict_param(
            loggers, 'loggers', key_type=str, value_type=LoggerDefinition)
                        or default_loggers())
        self.description = check.opt_str_param(description, 'description')
Example 11
    def __new__(
        cls,
        name=None,
        resource_defs=None,
        logger_defs=None,
        executor_defs=None,
        description=None,
        intermediate_storage_defs=None,
    ):
        from dagster.core.storage.system_storage import default_intermediate_storage_defs

        from .intermediate_storage import IntermediateStorageDefinition

        check.opt_dict_param(resource_defs,
                             "resource_defs",
                             key_type=str,
                             value_type=ResourceDefinition)
        if resource_defs and "io_manager" in resource_defs:
            resource_defs_with_defaults = resource_defs
        else:
            from dagster.core.storage.mem_io_manager import mem_io_manager

            resource_defs_with_defaults = merge_dicts(
                {"io_manager": mem_io_manager}, resource_defs or {})

        return super(ModeDefinition, cls).__new__(
            cls,
            name=check_valid_name(name) if name else DEFAULT_MODE_NAME,
            resource_defs=resource_defs_with_defaults,
            loggers=(check.opt_dict_param(logger_defs,
                                          "logger_defs",
                                          key_type=str,
                                          value_type=LoggerDefinition)
                     or default_loggers()),
            intermediate_storage_defs=check.list_param(
                intermediate_storage_defs if intermediate_storage_defs else
                default_intermediate_storage_defs,
                "intermediate_storage_defs",
                of_type=IntermediateStorageDefinition,
            ),
            executor_defs=check.list_param(
                executor_defs if executor_defs else default_executors,
                "executor_defs",
                of_type=ExecutorDefinition,
            ),
            description=check.opt_str_param(description, "description"),
        )
Example 12
def create_log_manager(context_creation_data):
    check.inst_param(context_creation_data, 'context_creation_data', ContextCreationData)

    pipeline_def, mode_def, environment_config, run_config = (
        context_creation_data.pipeline_def,
        context_creation_data.mode_def,
        context_creation_data.environment_config,
        context_creation_data.run_config,
    )

    loggers = []
    for logger_key, logger_def in mode_def.loggers.items() or default_loggers().items():
        if logger_key in environment_config.loggers:
            loggers.append(
                logger_def.logger_fn(
                    InitLoggerContext(
                        environment_config.loggers.get(logger_key, {}).get('config'),
                        pipeline_def,
                        logger_def,
                        run_config.run_id,
                    )
                )
            )

    if not loggers:
        for (logger_def, logger_config) in default_system_loggers():
            loggers.append(
                logger_def.logger_fn(
                    InitLoggerContext(logger_config, pipeline_def, logger_def, run_config.run_id)
                )
            )

    # should this be first in loggers list?
    loggers.append(context_creation_data.instance.get_event_listener())

    return DagsterLogManager(
        run_id=run_config.run_id,
        logging_tags=get_logging_tags(
            context_creation_data.run_config, context_creation_data.pipeline_def
        ),
        loggers=loggers,
    )
Example 13
def _create_loggers(environment_config, run_config, pipeline_def, mode_def):
    check.inst_param(environment_config, 'environment_config', EnvironmentConfig)
    check.inst_param(run_config, 'run_config', RunConfig)
    check.inst_param(pipeline_def, 'pipeline_def', PipelineDefinition)
    check.inst_param(mode_def, 'mode_def', ModeDefinition)

    loggers = []
    for logger_key, logger_def in mode_def.loggers.items() or default_loggers().items():
        if logger_key in environment_config.loggers:
            loggers.append(
                logger_def.logger_fn(
                    InitLoggerContext(
                        environment_config.loggers.get(logger_key, {}).get('config'),
                        pipeline_def,
                        logger_def,
                        run_config.run_id,
                    )
                )
            )

    if run_config.loggers:
        for logger in run_config.loggers:
            loggers.append(logger)

    if not loggers:
        for (logger_def, logger_config) in default_system_loggers():
            loggers.append(
                logger_def.logger_fn(
                    InitLoggerContext(logger_config, pipeline_def, logger_def, run_config.run_id)
                )
            )

    if run_config.event_callback:
        # note: logger_def here is reused from the last iteration of the loops above
        init_logger_context = InitLoggerContext({}, pipeline_def, logger_def, run_config.run_id)
        loggers.append(
            construct_event_logger(run_config.event_callback).logger_fn(init_logger_context)
        )

    return loggers
Example 14
    def __new__(
        cls,
        name=None,
        resource_defs=None,
        logger_defs=None,
        system_storage_defs=None,
        executor_defs=None,
        description=None,
    ):
        from dagster.core.storage.system_storage import default_system_storage_defs

        from .system_storage import SystemStorageDefinition

        return super(ModeDefinition, cls).__new__(
            cls,
            name=check.opt_str_param(name, 'name', DEFAULT_MODE_NAME),
            resource_defs=check.opt_dict_param(resource_defs,
                                               'resource_defs',
                                               key_type=str,
                                               value_type=ResourceDefinition),
            loggers=(check.opt_dict_param(logger_defs,
                                          'logger_defs',
                                          key_type=str,
                                          value_type=LoggerDefinition)
                     or default_loggers()),
            system_storage_defs=check.list_param(
                system_storage_defs
                if system_storage_defs else default_system_storage_defs,
                'system_storage_defs',
                of_type=SystemStorageDefinition,
            ),
            executor_defs=check.list_param(
                executor_defs if executor_defs else default_executors,
                'executor_defs',
                of_type=ExecutorDefinition,
            ),
            description=check.opt_str_param(description, 'description'),
        )
Example 15
    def __new__(
        cls,
        name: Optional[str] = None,
        resource_defs: Optional[Dict[str, ResourceDefinition]] = None,
        logger_defs: Optional[Dict[str, LoggerDefinition]] = None,
        executor_defs: Optional[List[ExecutorDefinition]] = None,
        description: Optional[str] = None,
        intermediate_storage_defs: Optional[
            List["IntermediateStorageDefinition"]] = None,
        _config_mapping: Optional[ConfigMapping] = None,
        _partitioned_config: Optional["PartitionedConfig"] = None,
    ):
        from dagster.core.storage.system_storage import default_intermediate_storage_defs

        from .intermediate_storage import IntermediateStorageDefinition
        from .partition import PartitionedConfig

        check.opt_dict_param(resource_defs,
                             "resource_defs",
                             key_type=str,
                             value_type=ResourceDefinition)
        if resource_defs and "io_manager" in resource_defs:
            resource_defs_with_defaults = resource_defs
        else:
            from dagster.core.storage.mem_io_manager import mem_io_manager

            resource_defs_with_defaults = merge_dicts(
                {"io_manager": mem_io_manager}, resource_defs or {})

        if _config_mapping:
            experimental_arg_warning("_config_mapping",
                                     "ModeDefinition.__new__")

        if _partitioned_config:
            experimental_arg_warning("_partitioned_config",
                                     "ModeDefinition.__new__")

        return super(ModeDefinition, cls).__new__(
            cls,
            name=check_valid_name(name) if name else DEFAULT_MODE_NAME,
            resource_defs=resource_defs_with_defaults,
            loggers=(check.opt_dict_param(logger_defs,
                                          "logger_defs",
                                          key_type=str,
                                          value_type=LoggerDefinition)
                     or default_loggers()),
            intermediate_storage_defs=check.list_param(
                intermediate_storage_defs if intermediate_storage_defs else
                default_intermediate_storage_defs,
                "intermediate_storage_defs",
                of_type=IntermediateStorageDefinition,
            ),
            executor_defs=check.list_param(
                executor_defs if executor_defs else default_executors,
                "executor_defs",
                of_type=ExecutorDefinition,
            ),
            description=check.opt_str_param(description, "description"),
            config_mapping=check.opt_inst_param(_config_mapping,
                                                "_config_mapping",
                                                ConfigMapping),
            partitioned_config=check.opt_inst_param(_partitioned_config,
                                                    "_partitioned_config",
                                                    PartitionedConfig),
        )