Code example #1
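All of these examples come from the Dagster code base and its test suite. Helpers such as LogTestHandler, JsonSqlite3Handler, init_db, dummy_init_logger_context, and the seven compatibility shim are internal to that repository, and the tests also rely on the standard-library modules sqlite3, tempfile, threading, time, and uuid. Since every example funnels a handler through construct_single_handler_logger, a rough stdlib-only sketch of the idea follows; it is a hypothetical simplification, since (as the examples show) the real helper returns a logger definition exposing a logger_fn, not a bare logger:

import logging

def single_handler_logger_sketch(name, level, handler):
    # Hypothetical stand-in for Dagster's helper: a named logger with
    # exactly one handler, filtering out records below `level`.
    logger = logging.getLogger(name)
    logger.setLevel(level.upper())  # e.g. 'debug' -> 'DEBUG'
    logger.addHandler(handler)
    return logger

This first test wires two loggers into a pipeline through ModeDefinition, one named 'test' at debug level and one named 'critical' at critical level, executes the pipeline, and asserts that the solid's 'Hello, there!' message reaches the debug handler but is filtered out of the critical one.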
def test_logging():
    records = []
    critical_records = []

    pipeline_def = PipelineDefinition(
        name='hello_logging_pipeline',
        solids=[define_hello_logging_solid()],
        mode_definitions=[
            ModeDefinition(
                loggers={
                    'test': construct_single_handler_logger(
                        'test', 'debug', LogTestHandler(records)),
                    'critical': construct_single_handler_logger(
                        'critical', 'critical', LogTestHandler(critical_records)),
                })
        ],
    )

    execute_pipeline(pipeline_def, {'loggers': {'test': {}, 'critical': {}}})

    messages = [x.dagster_meta['orig_message'] for x in records]

    assert 'Hello, there!' in messages

    critical_messages = [
        x.dagster_meta['orig_message'] for x in critical_records
    ]

    assert 'Hello, there!' not in critical_messages
Code example #2
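The same level-filtering check, driven directly against DagsterLogManager instead of through a pipeline run: each logger is instantiated by hand from its definition's logger_fn and an InitLoggerContext. This uses an older Dagster API with positional constructors; example #3 is the same test updated for a later version.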
def test_log_level_filtering():
    records = []
    critical_records = []

    debug_logger_def = construct_single_handler_logger('debug_handler',
                                                       'debug',
                                                       LogTestHandler(records))
    critical_logger_def = construct_single_handler_logger(
        'critical_handler', 'critical', LogTestHandler(critical_records))

    loggers = [
        logger_def.logger_fn(
            InitLoggerContext({}, PipelineDefinition([]), logger_def, ''))
        for logger_def in [debug_logger_def, critical_logger_def]
    ]

    log_manager = DagsterLogManager('', {}, loggers)

    log_manager.debug('Hello, there!')

    messages = [x.dagster_meta['orig_message'] for x in records]

    assert 'Hello, there!' in messages

    critical_messages = [
        x.dagster_meta['orig_message'] for x in critical_records
    ]

    assert 'Hello, there!' not in critical_messages
Code example #3
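The same test updated for that later API: InitLoggerContext takes keyword arguments, PipelineDefinition is given a name, and the manager is built with DagsterLogManager.create rather than the positional constructor.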
def test_log_level_filtering():
    records = []
    critical_records = []

    debug_logger_def = construct_single_handler_logger("debug_handler",
                                                       "debug",
                                                       LogTestHandler(records))
    critical_logger_def = construct_single_handler_logger(
        "critical_handler", "critical", LogTestHandler(critical_records))

    loggers = [
        logger_def.logger_fn(
            InitLoggerContext(
                {},
                logger_def,
                pipeline_def=PipelineDefinition([], "test"),
                run_id="",
            )) for logger_def in [debug_logger_def, critical_logger_def]
    ]

    log_manager = DagsterLogManager.create(loggers=loggers)

    log_manager.debug("Hello, there!")

    messages = [x.dagster_meta["orig_message"] for x in records]

    assert "Hello, there!" in messages

    critical_messages = [
        x.dagster_meta["orig_message"] for x in critical_records
    ]

    assert "Hello, there!" not in critical_messages
Code example #4
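Error handling: the handler's connect method is monkey-patched to raise, and pytest's caplog fixture asserts that the log manager reports the failure through the root logger rather than letting it propagate to the caller.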
def test_error_during_logging(caplog):
    run_id = str(uuid.uuid4())
    with tempfile.NamedTemporaryFile() as sqlite3_db:
        sqlite3_db_path = sqlite3_db.name
        init_db(sqlite3_db_path)

        sqlite3_handler = JsonSqlite3Handler(sqlite3_db_path)

        def err_conn(*args, **kwargs):
            raise Exception('Bailing!')

        # Monkey-patch connect so that logging through the handler fails.
        sqlite3_handler.connect = err_conn

        sqlite3_logger_def = construct_single_handler_logger(
            'sqlite3', 'debug', sqlite3_handler)
        sqlite3_logger = sqlite3_logger_def.logger_fn(
            dummy_init_logger_context(sqlite3_logger_def, run_id))
        sqlite3_log_manager = DagsterLogManager(run_id, {}, [sqlite3_logger])

        sqlite3_log_manager.info('Testing error handling')

        # caplog captures (logger_name, numeric_level, message) tuples;
        # 50 is logging.CRITICAL and 40 is logging.ERROR.
        assert caplog.record_tuples == [
            ('root', 50, 'Error during logging!'),
            ('root', 40, 'Bailing!'),
        ]
Code example #5
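A thread target for a watcher test: it builds a test log manager around an in-memory handler, then runs a JsonSqlite3LogWatcher that tails the SQLite log database and forwards rows into that manager until is_done is set.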
def check_thread_target(sqlite3_db_path, is_done, run_id, test_log_records):
    test_handler = LogTestHandler(test_log_records)
    test_logger_def = construct_single_handler_logger('test', 'debug', test_handler)
    test_logger = test_logger_def.logger_fn(dummy_init_logger_context(test_logger_def, run_id))
    test_log_manager = DagsterLogManager(run_id, {}, [test_logger])
    test_log_watcher = JsonSqlite3LogWatcher(sqlite3_db_path, test_log_manager, is_done)
    test_log_watcher.watch()
Code example #6
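An end-to-end watcher test: 1,000 records are written through JsonSqlite3Handler, the row count is verified with raw SQL, and the table is then replayed through JsonSqlite3LogWatcher so each stored JSON row can be compared against the re-emitted record.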
def test_json_sqlite3_watcher():
    test_log_records = []
    run_id = str(uuid.uuid4())
    with tempfile.NamedTemporaryFile() as sqlite3_db:
        sqlite3_db_path = sqlite3_db.name
        init_db(sqlite3_db_path)

        sqlite3_handler = JsonSqlite3Handler(sqlite3_db_path)
        sqlite3_logger_def = construct_single_handler_logger(
            'sqlite3', 'debug', sqlite3_handler)
        sqlite3_logger = sqlite3_logger_def.logger_fn(
            dummy_init_logger_context(sqlite3_logger_def, run_id))
        sqlite3_log_manager = DagsterLogManager(run_id, {}, [sqlite3_logger])

        for i in range(1000):
            sqlite3_log_manager.info('Testing ' + str(i))

        conn = sqlite3.connect(sqlite3_db_path)
        cursor = conn.cursor()
        count = cursor.execute('select count(1) from logs').fetchall()
        assert count[0][0] == 1000

        # Setting the event before watching makes the watcher drain the
        # existing rows once and return instead of polling indefinitely.
        is_done = threading.Event()
        is_done.set()

        test_handler = LogTestHandler(test_log_records)
        test_logger_def = construct_single_handler_logger(
            'test', 'debug', test_handler)
        test_logger = test_logger_def.logger_fn(
            dummy_init_logger_context(test_logger_def, run_id))
        sqlite3_watcher_log_manager = DagsterLogManager(
            run_id, {}, [test_logger])
        sqlite3_watcher = JsonSqlite3LogWatcher(sqlite3_db_path,
                                                sqlite3_watcher_log_manager,
                                                is_done)

        sqlite3_watcher.watch()

        assert len(test_log_records) == 1000

        records = cursor.execute('select * from logs').fetchall()
        for i, record in enumerate(records):
            json_record = record[1]
            assert json_record == seven.json.dumps(
                test_log_records[i].__dict__)
Code example #7
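A thread target for an event-sink test: it forwards events from the sink into a test log manager, sleeping in a loop until the wrap_it_up event is set.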
def forward_logs(event_sink, wrap_it_up, run_id, test_log_records):
    test_handler = LogTestHandler(test_log_records)
    test_logger_def = construct_single_handler_logger('test', 'debug',
                                                      test_handler)
    test_logger = test_logger_def.logger_fn(
        dummy_init_logger_context(test_logger_def, run_id))
    test_log_manager = DagsterLogManager(run_id, {}, [test_logger])
    with event_sink.log_forwarding(test_log_manager):
        while not wrap_it_up.is_set():
            time.sleep(0.01)
Code example #8
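The producer half of a watcher test: a thread target that writes 1,000 records into a SQLite-backed log manager.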
def thread_target_source(sqlite3_db_path, run_id):
    sqlite3_handler = JsonSqlite3Handler(sqlite3_db_path)
    sqlite3_logger_def = construct_single_handler_logger('sqlite3', 'debug', sqlite3_handler)
    sqlite3_logger = sqlite3_logger_def.logger_fn(
        dummy_init_logger_context(sqlite3_logger_def, run_id)
    )
    sqlite3_log_manager = DagsterLogManager(run_id, {}, [sqlite3_logger])

    for i in range(1000):
        sqlite3_log_manager.info('Testing ' + str(i))
Code example #9
File: log.py Project: trevenrawr/dagster
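Adapts the logging machinery into an event stream: a StructuredLoggerHandler converts each logger message into an event record and passes it to the supplied callback.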
def construct_event_logger(event_record_callback):
    """
    Callback receives a stream of event_records. Piggybacks on the logging machinery.
    """
    check.callable_param(event_record_callback, "event_record_callback")

    return construct_single_handler_logger(
        "event-logger",
        "debug",
        StructuredLoggerHandler(lambda logger_message: event_record_callback(
            construct_event_record(logger_message))),
    )
Code example #10
File: log.py Project: trevenrawr/dagster
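The JSON-file variant of the event logger: JsonEventLoggerHandler rebuilds each log record as a StructuredLoggerMessage and writes the resulting event record to json_path.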
def construct_json_event_logger(json_path):
    """Record a stream of event records to json"""
    check.str_param(json_path, "json_path")
    return construct_single_handler_logger(
        "json-event-record-logger",
        "debug",
        JsonEventLoggerHandler(
            json_path,
            lambda record: construct_event_record(
                StructuredLoggerMessage(
                    name=record.name,
                    message=record.msg,
                    level=record.levelno,
                    meta=record.dagster_meta,
                    record=record,
                )),
        ),
    )
Code example #11
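The write half in isolation: 1,000 records go through the SQLite handler, and the row count is verified directly with sqlite3.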
def test_json_sqlite3_handler():
    run_id = str(uuid.uuid4())
    with tempfile.NamedTemporaryFile() as sqlite3_db:
        sqlite3_db_path = sqlite3_db.name
        init_db(sqlite3_db_path)

        sqlite3_handler = JsonSqlite3Handler(sqlite3_db_path)
        sqlite3_logger_def = construct_single_handler_logger(
            'sqlite3', 'debug', sqlite3_handler)
        sqlite3_logger = sqlite3_logger_def.logger_fn(
            dummy_init_logger_context(sqlite3_logger_def, run_id))
        sqlite3_log_manager = DagsterLogManager(run_id, {}, [sqlite3_logger])

        for i in range(1000):
            sqlite3_log_manager.info('Testing ' + str(i))

        conn = sqlite3.connect(sqlite3_db_path)
        cursor = conn.cursor()
        count = cursor.execute('select count(1) from logs').fetchall()
        assert count[0][0] == 1000
Code example #12
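A round trip through SqliteEventSink: 1,000 records are written via the sink's logger, the row count is checked, log_forwarding then replays them into a test handler, and each stored JSON row is compared against the forwarded record.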
def test_sink_log_forwarding():
    test_log_records = []
    run_id = str(uuid.uuid4())
    with safe_tempfile_path() as sqlite3_db_path:
        sink = SqliteEventSink(sqlite3_db_path)

        sqlite3_log_manager = DagsterLogManager(run_id, {},
                                                [sink.get_logger()])

        for i in range(1000):
            sqlite3_log_manager.info('Testing ' + str(i))

        with sqlite3.connect(sqlite3_db_path) as conn:
            cursor = conn.cursor()
            count = cursor.execute('select count(1) from logs').fetchall()
            assert count[0][0] == 1000

            test_handler = LogTestHandler(test_log_records)
            test_logger_def = construct_single_handler_logger(
                'test', 'debug', test_handler)
            test_logger = test_logger_def.logger_fn(
                dummy_init_logger_context(test_logger_def, run_id))
            sqlite3_watcher_log_manager = DagsterLogManager(
                run_id, {}, [test_logger])

            with sink.log_forwarding(sqlite3_watcher_log_manager):
                pass

            assert len(test_log_records) == 1000

            records = cursor.execute('select * from logs').fetchall()
            for i, record in enumerate(records):
                json_record = record[1]
                assert json_record == seven.json.dumps(
                    test_log_records[i].__dict__)

        # sqlite3's context manager commits or rolls back but does not close
        # the connection, so close it explicitly.
        conn.close()
        sink.on_pipeline_teardown()
Code example #13
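A one-line helper that builds a debug-level 'dagstermill' logger backed by JsonSqlite3Handler.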
def construct_sqlite_logger(sqlite_db_path):
    return construct_single_handler_logger('dagstermill', 'debug',
                                           JsonSqlite3Handler(sqlite_db_path))