Example #1
def test_log_level_filtering():
    records = []
    critical_records = []

    debug_logger_def = construct_single_handler_logger('debug_handler',
                                                       'debug',
                                                       LogTestHandler(records))
    critical_logger_def = construct_single_handler_logger(
        'critical_handler', 'critical', LogTestHandler(critical_records))

    loggers = [
        logger_def.logger_fn(
            InitLoggerContext({}, PipelineDefinition([]), logger_def, ''))
        for logger_def in [debug_logger_def, critical_logger_def]
    ]

    log_manager = DagsterLogManager('', {}, loggers)

    log_manager.debug('Hello, there!')

    messages = [x.dagster_meta['orig_message'] for x in records]

    assert 'Hello, there!' in messages

    critical_messages = [
        x.dagster_meta['orig_message'] for x in critical_records
    ]

    assert 'Hello, there!' not in critical_messages
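The test above exercises per-handler level filtering: the debug-level handler receives the message while the critical-level handler does not. The same behavior can be reproduced with the standard library alone; a minimal stdlib-only sketch (no Dagster required):

import logging

class ListHandler(logging.Handler):
    # Collects raw records in a list, much like LogTestHandler above.
    def __init__(self, records):
        super().__init__()
        self.records = records

    def emit(self, record):
        self.records.append(record)

debug_records, critical_records = [], []

logger = logging.getLogger("filter_demo")
logger.setLevel(logging.DEBUG)

debug_handler = ListHandler(debug_records)
debug_handler.setLevel(logging.DEBUG)
critical_handler = ListHandler(critical_records)
critical_handler.setLevel(logging.CRITICAL)
logger.addHandler(debug_handler)
logger.addHandler(critical_handler)

logger.debug("Hello, there!")

assert any(r.getMessage() == "Hello, there!" for r in debug_records)
assert not critical_records  # the CRITICAL handler filtered the DEBUG record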
Example #2
def test_log_level_filtering():
    records = []
    critical_records = []

    debug_logger_def = construct_single_handler_logger("debug_handler",
                                                       "debug",
                                                       LogTestHandler(records))
    critical_logger_def = construct_single_handler_logger(
        "critical_handler", "critical", LogTestHandler(critical_records))

    loggers = [
        logger_def.logger_fn(
            InitLoggerContext(
                {},
                logger_def,
                pipeline_def=PipelineDefinition([], "test"),
                run_id="",
            )) for logger_def in [debug_logger_def, critical_logger_def]
    ]

    log_manager = DagsterLogManager.create(loggers=loggers)

    log_manager.debug("Hello, there!")

    messages = [x.dagster_meta["orig_message"] for x in records]

    assert "Hello, there!" in messages

    critical_messages = [
        x.dagster_meta["orig_message"] for x in critical_records
    ]

    assert "Hello, there!" not in critical_messages
Example #3
def test_colored_console_logger_with_integer_log_level():
    @pipeline
    def pipe():
        pass

    colored_console_logger.logger_fn(
        InitLoggerContext({"name": "dagster", "log_level": 4}, pipe, colored_console_logger, "")
    )
Example #4
def test_colored_console_logger_with_integer_log_level():
    @pipeline
    def pipe():
        pass

    colored_console_logger.logger_fn(
        InitLoggerContext({
            'name': 'dagster',
            'log_level': 4
        }, pipe, colored_console_logger, ''))
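Examples #3 and #4 (and #8 below) pass the integer 4 rather than a named level. This works because Python log levels are plain integers underneath (logging.DEBUG == 10), so any int is accepted wherever a level is expected; a stdlib-only sketch of the same idea:

import logging
import sys

logging.addLevelName(4, "TRACE")        # optional: give the int a display name
logger = logging.getLogger("int_level_demo")
logger.setLevel(4)

handler = logging.StreamHandler(sys.stderr)
handler.setLevel(4)
logger.addHandler(handler)

logger.log(4, "trace-level message")    # emitted: 4 clears both thresholds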
Example #5
def setup_json_file_logger(tf_name, name="foo", level=logging.DEBUG):
    logger_def = define_json_file_logger(name, tf_name, level)
    init_logger_context = InitLoggerContext(
        {},
        logger_def,
        pipeline_def=PipelineDefinition([], "test"),
        run_id="",
    )

    return logger_def.logger_fn(init_logger_context)
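A hypothetical use of the helper above; the tempfile name and the JSON-lines output format are assumptions inferred from the name define_json_file_logger:

import logging
import tempfile

with tempfile.NamedTemporaryFile(suffix=".log") as tf:
    logger = setup_json_file_logger(tf.name)  # helper from Example #5
    logger.debug("hello")  # assumed: appended to tf.name as a JSON record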
Example #6
def initialize_console_manager(
        pipeline_run: Optional[PipelineRun]) -> DagsterLogManager:
    # initialize default colored console logger
    loggers = []
    for logger_def, logger_config in default_system_loggers():
        loggers.append(
            logger_def.logger_fn(
                InitLoggerContext(
                    logger_config,
                    logger_def,
                    run_id=pipeline_run.run_id if pipeline_run else None)))
    return DagsterLogManager(
        None, pipeline_run.tags if pipeline_run and pipeline_run.tags else {},
        loggers)
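A hypothetical call site for the helper above, e.g. from a host-process code path with no run in scope (passing None is permitted by the Optional annotation):

log_manager = initialize_console_manager(None)
log_manager.info("no run in scope; using the default colored console logger")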
Example #7
def test_construct_event_record():
    with construct_structured_logger(construct_event_record) as (logger_def, messages):

        init_logger_context = InitLoggerContext({}, PipelineDefinition([]), logger_def, '')
        logger = logger_def.logger_fn(init_logger_context)

        context = create_test_pipeline_execution_context(
            run_config_loggers=[logger], tags={'pipeline': 'some_pipeline'}
        )
        context.log.info('random message')

        assert len(messages) == 1
        message = messages[0]
        assert isinstance(message, LogMessageRecord)
Example #8
def test_colored_console_logger_with_integer_log_level():
    colored_console_logger.logger_fn(
        InitLoggerContext({
            'name': 'dagster',
            'log_level': 4
        }, PipelineDefinition([]), colored_console_logger, ''))
Example #9
    def populate_context(
        self,
        run_id=None,
        mode=None,
        solid_def_name=None,
        pipeline_name=None,
        marshal_dir=None,
        environment_config=None,
        input_name_type_dict=None,
        output_name_type_dict=None,
        output_log_path=None,
        **_kwargs
    ):
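        """Rebuild the in-notebook execution context for a dagstermill solid.

        Parameters mirror the values the papermill harness serializes into
        the notebook (hence populated_by_papermill below).
        """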
        check.str_param(run_id, 'run_id')
        check.str_param(mode, 'mode')
        check.str_param(solid_def_name, 'solid_def_name')
        check.str_param(pipeline_name, 'pipeline_name')
        check.str_param(marshal_dir, 'marshal_dir')
        check.dict_param(environment_config, 'environment_config')
        check.dict_param(input_name_type_dict, 'input_name_type_dict')
        check.dict_param(output_name_type_dict, 'output_name_type_dict')
        check.str_param(output_log_path, 'output_log_path')

        self.populated_by_papermill = True
        self.solid_def_name = solid_def_name
        self.marshal_dir = marshal_dir

        logger_def = construct_logger(output_log_path)
        loggers = {'dagstermill': logger_def}

        if self.repository_def is None:
            self.solid_def = None
            self.pipeline_def = PipelineDefinition(
                [],
                mode_definitions=[ModeDefinition(loggers=loggers)],
                name='Dummy Pipeline (No Repo Registration)',
            )
            self.input_name_type_dict = dict_to_enum(input_name_type_dict)
            self.output_name_type_dict = dict_to_enum(output_name_type_dict)
            for _, runtime_type_enum in self.input_name_type_dict.items():
                if runtime_type_enum == SerializableRuntimeType.NONE:
                    raise DagstermillError(
                        'If Dagstermill solids have inputs that require serialization strategies '
                        'that are not pickling, then you must register a repository within '
                        'notebook by calling dagstermill.register_repository(repository_def)'
                    )
            for _, runtime_type_enum in self.output_name_type_dict.items():
                if runtime_type_enum == SerializableRuntimeType.NONE:
                    raise DagstermillError(
                        'If Dagstermill solids have outputs that require serialization strategies '
                        'that are not pickling, then you must register a repository within '
                        'notebook by calling dagstermill.register_repository(repository_def).'
                    )
            environment_config = {'loggers': {'dagstermill': {}}}
            run_config = RunConfig(run_id=run_id, mode=mode)

        else:
            self.pipeline_def = self.repository_def.get_pipeline(pipeline_name)
            check.invariant(
                self.pipeline_def.has_solid_def(solid_def_name),
                'solid {} not found'.format(solid_def_name),
            )
            self.solid_def = self.pipeline_def.solid_def_named(solid_def_name)

            logger = logger_def.logger_fn(
                InitLoggerContext({}, self.pipeline_def, logger_def, run_id)
            )

            run_config = RunConfig(run_id, loggers=[logger], mode=mode)

        with scoped_pipeline_context(
            self.pipeline_def,
            environment_config,
            run_config,
            scoped_resources_builder_cm=self.setup_resources,
        ) as pipeline_context:
            self.context = DagstermillInNotebookExecutionContext(pipeline_context)

        return self.context
Example #10
def dummy_init_logger_context(logger_def, run_id):
    return InitLoggerContext({}, PipelineDefinition([]), logger_def, run_id)
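A hypothetical use of the fixture above; some_logger_def stands in for any LoggerDefinition whose logger_fn tolerates an empty config dict:

context = dummy_init_logger_context(some_logger_def, 'test-run')
logger = some_logger_def.logger_fn(context)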
Example #11
    def populate_context(
        self,
        run_id,
        mode,
        solid_def_name,
        pipeline_def_name,
        marshal_dir,
        environment_dict,
        input_name_type_dict,
        output_name_type_dict,
        output_log_path,
    ):
        check.dict_param(environment_dict, 'environment_dict')
        self.populated_by_papermill = True
        self.solid_def_name = solid_def_name
        self.marshal_dir = marshal_dir

        logger_def = construct_logger(output_log_path)
        loggers = {'dagstermill': logger_def}

        if self.repository_def is None:
            self.solid_def = None
            self.pipeline_def = PipelineDefinition(
                [],
                mode_definitions=[ModeDefinition(loggers=loggers)],
                name='Dummy Pipeline (No Repo Registration)',
            )
            self.input_name_type_dict = dict_to_enum(input_name_type_dict)
            self.output_name_type_dict = dict_to_enum(output_name_type_dict)
            for _, runtime_type_enum in self.input_name_type_dict.items():
                if runtime_type_enum == SerializableRuntimeType.NONE:
                    raise DagstermillError(
                        'If Dagstermill solids have inputs that require serialization strategies '
                        'that are not pickling, then you must register a repository within '
                        'notebook by calling dm.register_repository(repository_def)'
                    )
            for _, runtime_type_enum in self.output_name_type_dict.items():
                if runtime_type_enum == SerializableRuntimeType.NONE:
                    raise DagstermillError(
                        'If Dagstermill solids have outputs that require serialization strategies '
                        'that are not pickling, then you must register a repository within '
                        'notebook by calling dm.register_repository(repository_def).'
                    )
            with scoped_pipeline_context(
                self.pipeline_def,
                {'loggers': {'dagstermill': {}}},
                RunConfig(run_id=run_id, mode=mode),
            ) as pipeline_context:
                self.context = DagstermillInNotebookExecutionContext(
                    pipeline_context)
        else:
            self.pipeline_def = self.repository_def.get_pipeline(
                pipeline_def_name)
            check.invariant(self.pipeline_def.has_solid_def(solid_def_name))
            self.solid_def = self.pipeline_def.solid_def_named(solid_def_name)

            logger = logger_def.logger_fn(
                InitLoggerContext({}, self.pipeline_def, logger_def, run_id))

            run_config = RunConfig(run_id, loggers=[logger], mode=mode)
            # See block comment above referencing this issue
            # See https://github.com/dagster-io/dagster/issues/796
            with scoped_pipeline_context(self.pipeline_def, environment_dict,
                                         run_config) as pipeline_context:
                self.context = DagstermillInNotebookExecutionContext(
                    pipeline_context)

        return self.context
Example #12
def setup_json_file_logger(tf_name, name='foo', level=logging.DEBUG):
    logger_def = define_json_file_logger(name, tf_name, level)
    init_logger_context = InitLoggerContext({}, PipelineDefinition([]),
                                            logger_def, '')

    return logger_def.logger_fn(init_logger_context)
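On recent Dagster releases, the context for unit-testing a logger_fn can be built without constructing InitLoggerContext by hand. A sketch using dagster.build_init_logger_context; treat its availability and exact signature on your installed version as an assumption worth checking:

from dagster import build_init_logger_context, colored_console_logger

# Assumed helper from recent Dagster releases for testing logger definitions.
context = build_init_logger_context(
    logger_config={"name": "dagster", "log_level": "INFO"},
)
logger = colored_console_logger.logger_fn(context)
logger.info("logger_fn invoked directly in a test")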