def test_error_during_logging(caplog):
    """A failure inside the sqlite3 handler must be reported via the root
    logger rather than propagating out of the log manager."""
    run_id = str(uuid.uuid4())
    with tempfile.NamedTemporaryFile() as sqlite3_db:
        db_path = sqlite3_db.name
        init_db(db_path)

        handler = JsonSqlite3Handler(db_path)

        # Force every connection attempt to blow up so the error-handling
        # path of the logging machinery is exercised.
        def err_conn(*args, **kwargs):
            raise Exception('Bailing!')

        handler.connect = err_conn

        logger_def = construct_single_handler_logger('sqlite3', 'debug', handler)
        logger = logger_def.logger_fn(dummy_init_logger_context(logger_def, run_id))
        log_manager = DagsterLogManager(run_id, {}, [logger])

        log_manager.info('Testing error handling')

        # The failure surfaces as a CRITICAL (50) notice followed by an
        # ERROR (40) carrying the original exception message.
        assert caplog.record_tuples == [
            ('root', 50, 'Error during logging!'),
            ('root', 40, 'Bailing!'),
        ]
def thread_target_source(sqlite3_db_path, run_id):
    """Worker body for concurrency tests: emit 1000 info records through a
    sqlite3-backed log manager pointed at *sqlite3_db_path*."""
    handler = JsonSqlite3Handler(sqlite3_db_path)
    logger_def = construct_single_handler_logger('sqlite3', 'debug', handler)
    logger = logger_def.logger_fn(dummy_init_logger_context(logger_def, run_id))
    log_manager = DagsterLogManager(run_id, {}, [logger])
    for i in range(1000):
        log_manager.info('Testing ' + str(i))
def test_json_sqlite3_watcher():
    """The watcher should replay every persisted log record into a second
    log manager, and the replayed records must round-trip to the same JSON
    that is stored in the database."""
    test_log_records = []
    run_id = str(uuid.uuid4())
    with tempfile.NamedTemporaryFile() as sqlite3_db:
        sqlite3_db_path = sqlite3_db.name
        init_db(sqlite3_db_path)

        sqlite3_handler = JsonSqlite3Handler(sqlite3_db_path)
        sqlite3_logger_def = construct_single_handler_logger(
            'sqlite3', 'debug', sqlite3_handler
        )
        sqlite3_logger = sqlite3_logger_def.logger_fn(
            dummy_init_logger_context(sqlite3_logger_def, run_id)
        )
        sqlite3_log_manager = DagsterLogManager(run_id, {}, [sqlite3_logger])

        for i in range(1000):
            sqlite3_log_manager.info('Testing ' + str(i))

        conn = sqlite3.connect(sqlite3_db_path)
        try:
            cursor = conn.cursor()
            count = cursor.execute('select count(1) from logs').fetchall()
            assert count[0][0] == 1000

            # The event is set up-front so the watcher drains the existing
            # records and then exits instead of blocking forever.
            is_done = threading.Event()
            is_done.set()

            test_handler = LogTestHandler(test_log_records)
            test_logger_def = construct_single_handler_logger(
                'test', 'debug', test_handler
            )
            test_logger = test_logger_def.logger_fn(
                dummy_init_logger_context(test_logger_def, run_id)
            )
            sqlite3_watcher_log_manager = DagsterLogManager(run_id, {}, [test_logger])
            sqlite3_watcher = JsonSqlite3LogWatcher(
                sqlite3_db_path, sqlite3_watcher_log_manager, is_done
            )

            sqlite3_watcher.watch()

            assert len(test_log_records) == 1000

            records = cursor.execute('select * from logs').fetchall()
            for i, record in enumerate(records):
                json_record = record[1]
                assert json_record == seven.json.dumps(test_log_records[i].__dict__)
        finally:
            # Fix: the connection was previously never closed, leaking the
            # handle (and on Windows keeping the temp file locked).
            conn.close()
def test_json_sqlite3_handler():
    """Every record logged through the sqlite3 handler should be persisted:
    after 1000 info calls, the logs table must contain 1000 rows."""
    run_id = str(uuid.uuid4())
    with tempfile.NamedTemporaryFile() as sqlite3_db:
        sqlite3_db_path = sqlite3_db.name
        init_db(sqlite3_db_path)

        sqlite3_handler = JsonSqlite3Handler(sqlite3_db_path)
        sqlite3_logger_def = construct_single_handler_logger(
            'sqlite3', 'debug', sqlite3_handler
        )
        sqlite3_logger = sqlite3_logger_def.logger_fn(
            dummy_init_logger_context(sqlite3_logger_def, run_id)
        )
        sqlite3_log_manager = DagsterLogManager(run_id, {}, [sqlite3_logger])

        for i in range(1000):
            sqlite3_log_manager.info('Testing ' + str(i))

        conn = sqlite3.connect(sqlite3_db_path)
        try:
            cursor = conn.cursor()
            count = cursor.execute('select count(1) from logs').fetchall()
            assert count[0][0] == 1000
        finally:
            # Fix: the connection was previously never closed, leaking the
            # handle (and on Windows keeping the temp file locked).
            conn.close()