def colored_console_logger(init_context):
    """Build a colorized console logger from the init context's logger config.

    Reads ``log_level`` and ``name`` from ``init_context.logger_config`` and
    installs coloredlogs with blue level names, green timestamps, and red errors.
    """
    config = init_context.logger_config
    level = coerce_valid_log_level(config["log_level"])
    logger_ = logging.getLoggerClass()(config["name"], level=level)
    coloredlogs.install(
        logger=logger_,
        level=level,
        fmt=default_format_string(),
        field_styles={"levelname": {"color": "blue"}, "asctime": {"color": "green"}},
        level_styles={"debug": {}, "error": {"color": "red"}},
    )
    return logger_
def colored_console_logger(init_context):
    """Colorized console logger.

    Level and name come from ``init_context.logger_config``; styling is fixed
    (blue level names, green timestamps, red errors).
    """
    log_level = coerce_valid_log_level(init_context.logger_config['log_level'])
    logger_name = init_context.logger_config['name']
    klass = logging.getLoggerClass()
    logger_ = klass(logger_name, level=log_level)
    # Precompute the style maps so the install call stays readable.
    field_styles = {'levelname': {'color': 'blue'}, 'asctime': {'color': 'green'}}
    level_styles = {'debug': {}, 'error': {'color': 'red'}}
    coloredlogs.install(
        logger=logger_,
        level=log_level,
        fmt=default_format_string(),
        field_styles=field_styles,
        level_styles=level_styles,
    )
    return logger_
def foo_logger(init_context):
    """Logger named 'foo' whose .log calls are captured, not emitted.

    Each call records ``(level, msg)`` into the module-level
    ``foo_logger_captured_results`` list.
    """
    logger_ = logging.Logger('foo')

    def _capture(level, msg, **kwargs):
        # Swallow the record; only remember what was logged.
        foo_logger_captured_results.append((level, msg))

    logger_.log = _capture
    logger_.setLevel(coerce_valid_log_level(init_context.logger_config['log_level']))
    return logger_
def bar_logger(init_context):
    """Logger named 'bar' whose .log calls are captured, not emitted.

    Each call records ``(level, msg)`` into the module-level
    ``bar_logger_captured_results`` list.
    """
    captured = bar_logger_captured_results
    logger_ = logging.Logger("bar")
    # Replace .log wholesale so every call is recorded instead of handled.
    logger_.log = lambda level, msg, **kwargs: captured.append((level, msg))
    level = coerce_valid_log_level(init_context.logger_config["log_level"])
    logger_.setLevel(level)
    return logger_
def __new__(
    cls,
    error_info,
    message,
    level,
    user_message,
    run_id,
    timestamp,
    step_key=None,
    pipeline_name=None,
    dagster_event=None,
):
    """Validate every field, then construct the EventRecord tuple.

    Validation order matches the positional field order of the namedtuple.
    """
    checked_error_info = check.opt_inst_param(error_info, 'error_info', SerializableErrorInfo)
    checked_message = check.str_param(message, 'message')
    checked_level = coerce_valid_log_level(level)
    checked_user_message = check.str_param(user_message, 'user_message')
    checked_run_id = check.str_param(run_id, 'run_id')
    checked_timestamp = check.float_param(timestamp, 'timestamp')
    checked_step_key = check.opt_str_param(step_key, 'step_key')
    checked_pipeline_name = check.opt_str_param(pipeline_name, 'pipeline_name')
    checked_dagster_event = check.opt_inst_param(dagster_event, 'dagster_event', DagsterEvent)
    return super(EventRecord, cls).__new__(
        cls,
        checked_error_info,
        checked_message,
        checked_level,
        checked_user_message,
        checked_run_id,
        checked_timestamp,
        checked_step_key,
        checked_pipeline_name,
        checked_dagster_event,
    )
def __new__(
    cls,
    error_info,
    message,
    level,
    user_message,
    run_id,
    timestamp,
    step_key=None,
    pipeline_name=None,
    dagster_event=None,
):
    """Construct the EventRecord tuple, validating each field inline."""
    return super(EventRecord, cls).__new__(
        cls,
        check.opt_inst_param(error_info, "error_info", SerializableErrorInfo),
        check.str_param(message, "message"),
        # Level may arrive as a string or an int; normalize it here.
        coerce_valid_log_level(level),
        check.str_param(user_message, "user_message"),
        check.str_param(run_id, "run_id"),
        check.float_param(timestamp, "timestamp"),
        check.opt_str_param(step_key, "step_key"),
        check.opt_str_param(pipeline_name, "pipeline_name"),
        check.opt_inst_param(dagster_event, "dagster_event", DagsterEvent),
    )
def define_structured_logger(name, callback, level):
    """Build a logger definition that routes each record to ``callback``.

    The callback is wrapped in a StructuredLoggerHandler attached as the
    logger's single handler.
    """
    check.str_param(name, 'name')
    check.callable_param(callback, 'callback')
    handler = StructuredLoggerHandler(callback)
    return construct_single_handler_logger(name, coerce_valid_log_level(level), handler)
def test_single_step_resource_event_logs():
    # Attribute logs for single-step plans, which often represent sub-plans in a
    # multiprocessing execution environment. Most likely will need to be
    # rewritten with the refactor detailed in
    # https://github.com/dagster-io/dagster/issues/2239
    USER_SOLID_MESSAGE = "I AM A SOLID"
    USER_RESOURCE_MESSAGE = "I AM A RESOURCE"
    events = []

    def event_callback(record):
        assert isinstance(record, EventRecord)
        events.append(record)

    @solid(required_resource_keys={"a"})
    def resource_solid(context):
        context.log.info(USER_SOLID_MESSAGE)

    @resource
    def resource_a(context):
        context.log.info(USER_RESOURCE_MESSAGE)
        return "A"

    the_pipeline = PipelineDefinition(
        name="resource_logging_pipeline",
        solid_defs=[resource_solid],
        mode_defs=[
            ModeDefinition(
                resource_defs={"a": resource_a},
                logger_defs={"callback": construct_event_logger(event_callback)},
            )
        ],
    )

    with instance_for_test() as instance:
        pipeline_run = instance.create_run_for_pipeline(
            the_pipeline,
            run_config={"loggers": {"callback": {}}},
            step_keys_to_execute=["resource_solid"],
        )
        result = execute_run(InMemoryPipeline(the_pipeline), pipeline_run, instance)
        assert result.success

        # One INFO message from the solid and one from the resource.
        info_level = coerce_valid_log_level("INFO")
        log_messages = [
            event
            for event in events
            if isinstance(event, EventRecord) and event.level == info_level
        ]
        assert len(log_messages) == 2

        resource_log_message = next(
            message
            for message in log_messages
            if message.user_message == USER_RESOURCE_MESSAGE
        )
        # The resource's log line must be attributed to the executing step.
        assert resource_log_message.step_key == "resource_solid"
def define_json_file_logger(name, json_path, level):
    """Build a logger definition that appends JSON records to ``json_path``."""
    check.str_param(name, 'name')
    check.str_param(json_path, 'json_path')
    handler = JsonFileHandler(json_path)
    handler.setFormatter(define_default_formatter())
    return construct_single_handler_logger(name, coerce_valid_log_level(level), handler)
def __new__(cls, name, message, level, meta, record):
    """Construct the StructuredLoggerMessage tuple, validating each field."""
    return super(StructuredLoggerMessage, cls).__new__(
        cls,
        check.str_param(name, 'name'),
        check.str_param(message, 'message'),
        # Accept either a string or int level; normalize to the int form.
        coerce_valid_log_level(level),
        check.dict_param(meta, 'meta'),
        check.inst_param(record, 'record', logging.LogRecord),
    )
def colored_console_logger(init_context):
    """Colorized console logger using coloredlogs' default styling."""
    config = init_context.logger_config
    level = coerce_valid_log_level(config['log_level'])
    logger_ = logging.getLoggerClass()(config['name'], level=level)
    coloredlogs.install(logger=logger_, level=level, fmt=default_format_string())
    return logger_
def bar_logger(init_context):
    """Logger factory producing a BarLogger that prefixes every message.

    The prefix and log level are taken from ``init_context.logger_config``
    (keys ``prefix`` and ``log_level``).
    """

    class BarLogger(logging.Logger):
        def __init__(self, name, prefix, *args, **kwargs):
            self.prefix = prefix
            super(BarLogger, self).__init__(name, *args, **kwargs)

        def log(self, lvl, msg, *args, **kwargs):  # pylint: disable=arguments-differ
            # Prepend the configured prefix to every logged message.
            msg = self.prefix + msg
            super(BarLogger, self).log(lvl, msg, *args, **kwargs)

    logger_ = BarLogger("bar", init_context.logger_config["prefix"])
    logger_.setLevel(coerce_valid_log_level(init_context.logger_config["log_level"]))
    # Bug fix: the constructed logger must be returned — previously this
    # factory implicitly returned None, so the framework received no logger.
    # Every sibling logger factory in this file returns its logger.
    return logger_
def cloudwatch_logger(init_context):
    """This logger provides support for sending Dagster logs to AWS CloudWatch.

    Example:

    .. code-block:: python

        from dagster import ModeDefinition, execute_pipeline, pipeline, solid
        from dagster_aws.cloudwatch import cloudwatch_logger

        @solid
        def hello_cloudwatch(context):
            context.log.info('Hello, Cloudwatch!')
            context.log.error('This is an error')

        @pipeline(mode_defs=[ModeDefinition(logger_defs={'cloudwatch': cloudwatch_logger})])
        def hello_cloudwatch_pipeline():
            hello_cloudwatch()

        execute_pipeline(
            hello_cloudwatch_pipeline,
            {
                'loggers': {
                    'cloudwatch': {
                        'config': {
                            'log_group_name': '/dagster-test/test-cloudwatch-logging',
                            'log_stream_name': 'test-logging',
                            'aws_region': 'us-west-1'
                        }
                    }
                }
            },
        )
    """
    config = init_context.logger_config
    level = coerce_valid_log_level(config["log_level"])
    logger_ = logging.getLoggerClass()(config["name"], level=level)
    # Region and credentials are optional; the handler falls back to the
    # environment/default chain when they are absent.
    handler = CloudwatchLogsHandler(
        config["log_group_name"],
        config["log_stream_name"],
        aws_region=config.get("aws_region"),
        aws_secret_access_key=config.get("aws_secret_access_key"),
        aws_access_key_id=config.get("aws_access_key_id"),
    )
    logger_.addHandler(handler)
    return logger_
def construct_single_handler_logger(name, level, handler):
    """Return a @logger definition that wires ``handler`` onto a fresh logger.

    Both the logger and the handler are set to ``level``.
    """
    check.str_param(name, 'name')
    check.inst_param(handler, 'handler', logging.Handler)
    level = coerce_valid_log_level(level)

    @logger
    def single_handler_logger(_init_context):
        logger_ = logging.getLoggerClass()(name, level=level)
        logger_.addHandler(handler)
        handler.setLevel(level)
        return logger_

    return single_handler_logger
def json_console_logger(init_context):
    """Logger that writes each record to stderr as one JSON object."""
    config = init_context.logger_config
    level = coerce_valid_log_level(config['log_level'])
    logger_ = logging.getLoggerClass()(config['name'], level=level)

    class JsonFormatter(logging.Formatter):
        # Serialize the record's full attribute dict instead of a format string.
        def format(self, record):
            return seven.json.dumps(record.__dict__)

    handler = coloredlogs.StandardErrorHandler()
    handler.setFormatter(JsonFormatter())
    logger_.addHandler(handler)
    return logger_
def cloudwatch_logger(init_context):
    """Logger that forwards records to AWS CloudWatch Logs.

    Group/stream names are required config; region and credentials are
    optional and fall back to the handler's defaults.
    """
    config = init_context.logger_config
    level = coerce_valid_log_level(config['log_level'])
    logger_ = logging.getLoggerClass()(config['name'], level=level)
    handler = CloudwatchLogsHandler(
        config['log_group_name'],
        config['log_stream_name'],
        aws_region=config.get('aws_region'),
        aws_secret_access_key=config.get('aws_secret_access_key'),
        aws_access_key_id=config.get('aws_access_key_id'),
    )
    logger_.addHandler(handler)
    return logger_
def __init__(
    self,
    error_info,
    message,
    level,
    user_message,
    run_id,
    timestamp,
    step_key=None,
    pipeline_name=None,
    dagster_event=None,
):
    """Validate and store each event-record field on the instance.

    DagsterEvent is imported locally to avoid a circular import at module load.
    """
    from dagster.core.events import DagsterEvent

    self._error_info = check.opt_inst_param(error_info, 'error_info', SerializableErrorInfo)
    self._message = check.str_param(message, 'message')
    # Level may be a string name or an int; normalize to the int form.
    self._level = coerce_valid_log_level(level)
    self._user_message = check.str_param(user_message, 'user_message')
    self._run_id = check.str_param(run_id, 'run_id')
    self._timestamp = check.float_param(timestamp, 'timestamp')
    self._step_key = check.opt_str_param(step_key, 'step_key')
    self._pipeline_name = check.opt_str_param(pipeline_name, 'pipeline_name')
    self._dagster_event = check.opt_inst_param(dagster_event, 'dagster_event', DagsterEvent)
def test_logger(init_context):
    """Assert the expected config value arrived, record execution, return a logger."""
    assert init_context.logger_config == "secret testing value!!"
    it["ran"] = True
    return logging.Logger("test", level=coerce_valid_log_level("INFO"))
def test_logger(init_context):
    """Assert the enum config value arrived, record execution, return a logger."""
    assert init_context.logger_config["enum"] == TestPythonEnum.OTHER
    it["ran test_logger"] = True
    return logging.Logger("test", level=coerce_valid_log_level("INFO"))
def test_logger(init_context):
    """Assert the enum config value arrived, record execution, return a logger."""
    assert init_context.logger_config['enum'] == TestPythonEnum.OTHER
    it['ran test_logger'] = True
    logger_ = logging.Logger('test', level=coerce_valid_log_level('INFO'))
    return logger_
def foo_logger(init_context):
    """Plain 'foo' logger whose level is the logger config value itself."""
    level = coerce_valid_log_level(init_context.logger_config)
    logger_ = logging.Logger("foo")
    logger_.setLevel(level)
    return logger_
def test_logger(init_context):
    """Assert the expected config value arrived, record execution, return a logger."""
    assert init_context.logger_config == 'secret testing value!!'
    it['ran'] = True
    logger_ = logging.Logger('test', level=coerce_valid_log_level('INFO'))
    return logger_