def test_configure_logging_prod_timestamp(caplog: LogCaptureFixture) -> None:
    """Test production-mode logging with an added timestamp.

    Configures the "production" profile (JSON output) and verifies the
    emitted record is valid JSON carrying bound context, severity, and an
    ISO-8601 UTC timestamp within the last few seconds.
    """
    caplog.set_level(logging.INFO)
    configure_logging(
        name="myapp",
        profile="production",
        log_level="info",
        add_timestamp=True,
    )
    logger = structlog.get_logger("myapp")
    logger = logger.bind(answer=42)

    logger.info("Hello world")

    assert caplog.record_tuples[0][0] == "myapp"
    assert caplog.record_tuples[0][1] == logging.INFO
    # Production profile renders the event as a JSON document.
    data = json.loads(caplog.record_tuples[0][2])
    assert data == {
        "answer": 42,
        "event": "Hello world",
        "logger": "myapp",
        "severity": "info",
        "timestamp": ANY,
    }
    # Timestamp must be UTC ("Z" suffix) and recent.
    assert data["timestamp"].endswith("Z")
    timestamp = datetime.fromisoformat(data["timestamp"][:-1])
    timestamp = timestamp.replace(tzinfo=timezone.utc)
    now = datetime.now(tz=timezone.utc)
    assert now - timedelta(seconds=5) < timestamp < now
def test_no_signatures_check_signatures(
    caplog: LogCaptureFixture,
    docker_registry_secure: DockerRegistrySecure,
    known_good_image: TypingKnownGoodImage,
    runner,
):
    """Test docker-verify can operate on images without existing signatures."""
    caplog.clear()
    caplog.set_level(logging.DEBUG)

    # Derive a digest-less, uniquely tagged copy of the known-good name.
    src = known_good_image["image_name"]
    dst = src.clone()
    dst.digest = None
    dst.tag += __name__

    with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(
        docker_registry_secure
    ):
        outcome = runner.invoke(
            cli,
            args=["--check-signatures", "registry", str(src), str(dst)],
        )
        assert isinstance(outcome.exception, SystemExit)
        assert "does not contain any signatures" in caplog.text
        assert "is consistent." not in caplog.text
def test_configure_logging_dev_timestamp(caplog: LogCaptureFixture) -> None:
    """Test development-mode logging with an added timestamp."""
    caplog.set_level(logging.INFO)
    configure_logging(
        name="myapp",
        profile="development",
        log_level="info",
        add_timestamp=True,
    )
    bound = structlog.get_logger("myapp").bind(answer=42)

    bound.info("Hello world")

    record = caplog.record_tuples[0]
    assert record[0] == "myapp"
    assert record[1] == logging.INFO
    # Development profile renders a human-readable line; strip ANSI color
    # codes before matching.
    match = re.match(
        (r"(\d+-\d+-\d+T\d+:\d+:[\d.]+Z) \[info\s+\] Hello world \s+"
         r" \[myapp\] answer=42"),
        _strip_color(record[2]),
    )
    assert match
    isotimestamp = match.group(1)
    assert isotimestamp.endswith("Z")
    parsed = datetime.fromisoformat(isotimestamp[:-1])
    parsed = parsed.replace(tzinfo=timezone.utc)
    now = datetime.now(tz=timezone.utc)
    assert now - timedelta(seconds=5) < parsed < now
def test_no_signatures_check_signatures(
    runner,
    known_good_image_local: TypingKnownGoodImage,
    caplog: LogCaptureFixture,
):
    """Test docker-verify can operate on images without existing signatures."""
    caplog.clear()
    caplog.set_level(logging.DEBUG)

    # Target name: same image, no digest, distinct tag.
    src = known_good_image_local["image_name"]
    dst = deepcopy(src)
    dst.digest = None
    dst.tag += "_copy"

    with insecure_registry():
        outcome = runner.invoke(
            cli,
            args=["--check-signatures", "registry", str(src), str(dst)],
        )
        assert isinstance(outcome.exception, SystemExit)
        assert "does not contain any signatures" in caplog.text
        assert "is consistent." not in caplog.text
def test_skip_predictions_to_prevent_loop( self, trained_policy: UnexpecTEDIntentPolicy, model_storage: ModelStorage, resource: Resource, execution_context: ExecutionContext, default_domain: Domain, caplog: LogCaptureFixture, tracker_events: List[Event], should_skip: bool, tmp_path: Path, ): caplog.set_level(logging.DEBUG) loaded_policy = self.persist_and_load_policy( trained_policy, model_storage, resource, execution_context ) precomputations = None tracker = DialogueStateTracker(sender_id="init", slots=default_domain.slots) tracker.update_with_events(tracker_events, default_domain) prediction = loaded_policy.predict_action_probabilities( tracker, default_domain, precomputations ) assert ( "Skipping predictions for UnexpecTEDIntentPolicy" in caplog.text ) == should_skip if should_skip: assert prediction.probabilities == loaded_policy._default_predictions( default_domain )
def test_unauthorized_source(
    clirunner,
    caplog: LogCaptureFixture,
    docker_registry_secure: DockerRegistrySecure,
    gpgsigner: GPGSigner,
    known_good_image: TypingKnownGoodImage,
):
    """Test docker-sign can handle incorrect credentials.

    Signing from a Docker Hub source while presenting local-registry
    credentials must fail with an HTTP 401.
    """
    caplog.clear()
    caplog.set_level(logging.DEBUG)

    # Using local registry credentials when connecting to dockerhub ...
    with registry_credentials(docker_registry_secure):
        result = clirunner.invoke(
            cli,
            args=[
                "registry",
                "--keyid",
                gpgsigner.keyid,
                f"{Indices.DOCKERHUB}/dummy:dummy",
                str(known_good_image["image_name"]),
            ],
            env={"DSV_GPG_DATASTORE": str(gpgsigner.homedir)},
            input="\n",
        )
        assert result.exception
        assert "401" in caplog.text
        assert "Unauthorized" in caplog.text
def test_retry_database_read_errors( self, database_type: DatabaseType, caplog: LogCaptureFixture, monkeypatch: MonkeyPatch ) -> None: caplog.set_level(logging.DEBUG) database = Database(database_type) mocked_query = Mock(side_effect=[pyodbc.OperationalError("Error 1"), pyodbc.OperationalError("Error 2"), True]) monkeypatch.setattr(Session, "query", mocked_query) @retry_database_read_errors def query_with_retry() -> bool: with database.transaction_context() as session: return session.query() # type: ignore assert query_with_retry() assert mocked_query.call_count == 3 assert ( "Error 'Error 1' occurred while running query_with_retry. Trying again 2 more time(s) in 1 second(s)." in caplog.messages ) assert f"Error during {database} operation, transaction was rolled-back: Error 1" in caplog.messages assert ( "Error 'Error 2' occurred while running query_with_retry. Trying again 1 more time(s) in 1 second(s)." in caplog.messages ) assert f"Error during {database} operation, transaction was rolled-back: Error 2" in caplog.messages
def test_unauthorized_destination(
    caplog: LogCaptureFixture,
    clirunner,
    docker_registry_secure: DockerRegistrySecure,
    gpgsigner: GPGSigner,
    known_good_image: TypingKnownGoodImage,
):
    """Test docker-sign can handle incorrect credentials."""
    caplog.clear()
    caplog.set_level(logging.DEBUG)

    # Using local registry credentials when connecting to dockerhub ...
    with hybrid_trust_store(docker_registry_secure) as path, ca_trust_store(
        path
    ), registry_credentials(docker_registry_secure):
        outcome = clirunner.invoke(
            cli,
            args=[
                "registry",
                "--keyid",
                gpgsigner.keyid,
                str(known_good_image["image_name"]),
                f"{Indices.DOCKERHUB}/dummy:dummy",
            ],
            env={"DSV_GPG_DATASTORE": str(gpgsigner.homedir)},
            input=f"{gpgsigner.passphrase}\n",
        )
        assert outcome.exception
        # Source verification succeeds; the push is what gets rejected.
        assert "Integrity check passed." in caplog.text
        assert "401" in caplog.text
        assert "Unauthorized" in caplog.text
def test_bad_keyid(
    caplog: LogCaptureFixture,
    clirunner,
    docker_registry_secure: DockerRegistrySecure,
    gpgsigner: GPGSigner,
    known_good_image: TypingKnownGoodImage,
):
    """Test docker-sign can handle invalid keyids."""
    caplog.clear()
    caplog.set_level(logging.DEBUG)

    src = known_good_image["image_name"]
    dst = src.clone()
    dst.digest = None
    dst.tag += __name__

    with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(
        docker_registry_secure
    ):
        outcome = clirunner.invoke(
            cli,
            args=[
                "registry",
                "--keyid",
                "invalidkeyid",
                str(src),
                str(dst),
            ],
            env={"DSV_GPG_DATASTORE": str(gpgsigner.homedir)},
            input="invalidpassword\n",
        )
        assert outcome.exception
        # The image itself is fine; only signature creation fails.
        assert "Integrity check passed." in caplog.text
        assert "Failed to create signature!" in caplog.text
def test_no_signatures_sign_implicit(
    caplog: LogCaptureFixture,
    clirunner,
    docker_registry_secure: DockerRegistrySecure,
    gpgsigner: GPGSigner,
    known_good_image: TypingKnownGoodImage,
):
    """Test docker-sign can sign (implicit) images without existing signatures."""
    caplog.clear()
    caplog.set_level(logging.DEBUG)

    src = known_good_image["image_name"]
    dst = src.clone()
    dst.digest = None
    dst.tag += __name__

    with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(
        docker_registry_secure
    ):
        outcome = clirunner.invoke(
            cli,
            args=[
                "registry",
                "--keyid",
                gpgsigner.keyid,
                str(src),
                str(dst),
            ],
            env={"DSV_GPG_DATASTORE": str(gpgsigner.homedir)},
            input=f"{gpgsigner.passphrase}\n",
        )
        assert not outcome.exception
        assert "Integrity check passed." in caplog.text
        assert "Created new image" in caplog.text
        assert str(dst) in caplog.text
def test_clean_input_data(caplog: LogCaptureFixture) -> None:
    """Verify raw DSX rows are renamed, derived, and clipped as expected."""
    caplog.set_level(logging.INFO)
    raw_dsx = pd.DataFrame({
        "lowlevelcust": ["Account_0", "Account_0"],  # TODO FSC-371 Remove lowlevelcust again
        "projectid": ["Project_123", "Project_123"],
        "contractid": ["Contract_123", "Contract_123"],
        "shortid": [12345, 12345],
        "perioddate": [pd.Timestamp("2020-02-01"), pd.Timestamp("2019-12-01")],
        "Cost": [1.5, 3.0],
        "Adjusted History": [-2.0, 1.5],
        "masterpart": ["mn1234", "mn1234"],
    })
    expected = pd.DataFrame({
        "Project_ID": ["Project_123", "Project_123"],
        "Contract_ID": ["Contract_123", "Contract_123"],
        "Item_ID": [12345, 12345],
        "Date": [pd.Timestamp("2020-02-01"), pd.Timestamp("2019-12-01")],
        "Date_YYYYMM": [202002, 201912],
        "Unit_Cost": [1.5, 3.0],
        "Order_Cost": [0.0, 4.5],
        "Order_Quantity": [0.0, 1.5],
        "Wesco_Master_Number": ["mn1234", "mn1234"],
    })

    cleaned = clean_input_data(raw_dsx)

    pd.testing.assert_frame_equal(expected, cleaned)
    # The -2.0 adjusted-history value is a negative sale and must be zeroed.
    assert "Set 1 negative sales to zero" in caplog.messages
def test_start_and_wait(caplog: LogCaptureFixture):
    """Test if executor await for process to accept connections.

    The executor is stopped in a ``finally`` block so the spawned process
    is not leaked when the assertion fails.
    """
    caplog.set_level(logging.DEBUG, logger="mirakuru")
    executor = TCPExecutor(NC_COMMAND, "localhost", port=3000, timeout=5)
    executor.start()
    try:
        assert executor.running() is True
    finally:
        # Always reap the child process, even on assertion failure.
        executor.stop()
def test_generate_build_rates_features_aggregation_warning(caplog: LogCaptureFixture) -> None:
    """Duplicate (contract, airframe, date) build rates are averaged with a warning."""
    caplog.set_level(logging.WARNING)
    corrupted_input_build_rates = pd.DataFrame(
        [
            ["Project_1", "Contract_1", "Airframe_1", "Build Rate", datetime(2015, 1, 1), 1.0],
            ["Project_2", "Contract_1", "Airframe_1", "Build Rate", datetime(2015, 1, 1), 10.0],
            ["Project_3", "Contract_1", "Airframe_2", "Build Rate", datetime(2016, 1, 1), 2.0],
            ["Project_1", "Contract_1", "Airframe_1", "Build Rate", datetime(2017, 1, 1), 3.0],
        ],
        columns=["Project_ID", "Contract_ID", "Airframe", "Periodic_Data_Stream", "Date", "Value"],
    )

    internal_features, exogenous_features = generate_build_rates_features(corrupted_input_build_rates)

    assert "Airframe" in internal_features.keys()
    assert "Build_Rate" in exogenous_features.keys()
    rates, default_rate = exogenous_features["Build_Rate"]
    expected_rates = pd.DataFrame(
        [
            # 5.5 is a mean of build rate value after aggregating first two input rows that differ only by Project_ID
            ["Contract_1", "Airframe_1", datetime(2015, 1, 1), 5.5],
            ["Contract_1", "Airframe_1", datetime(2017, 1, 1), 3.0],
            ["Contract_1", "Airframe_2", datetime(2016, 1, 1), 2.0],
        ],
        columns=["Contract_ID", "Airframe", "Date", "Value"],
    )
    assert default_rate == 1
    assert BUILD_RATES_AGGREGATION_WARNING in caplog.messages
    pd.testing.assert_frame_equal(expected_rates, rates)
def test_generate_build_rates_features(caplog: LogCaptureFixture) -> None:
    """Clean input yields build-rate features without the aggregation warning."""
    caplog.set_level(logging.WARNING)

    internal_features, exogenous_features = generate_build_rates_features(INPUT_BUILD_RATES)

    assert "Airframe" in internal_features.keys()
    assert "Build_Rate" in exogenous_features.keys()
    rates, default_rate = exogenous_features["Build_Rate"]
    expected_rates = pd.DataFrame(
        [
            ["Contract_1", "Airframe_1", datetime(2015, 1, 1), 1.0],
            ["Contract_1", "Airframe_1", datetime(2017, 1, 1), 3.0],
            ["Contract_1", "Airframe_2", datetime(2016, 1, 1), 2.0],
        ],
        columns=["Contract_ID", "Airframe", "Date", "Value"],
    )
    assert default_rate == 1
    assert BUILD_RATES_AGGREGATION_WARNING not in caplog.messages
    pd.testing.assert_frame_equal(expected_rates, rates)

    # The internal "Airframe" feature annotates sales rows with their airframe.
    expected_sales = pd.DataFrame(
        [
            [1, "Project_1", "Contract_1", datetime(2015, 1, 1), "Airframe_1"],
            [2, "Project_3", "Contract_1", datetime(2016, 1, 1), "Airframe_2"],
            [3, "Project_1", "Contract_2", datetime(2016, 1, 1), "No Airframe"],
        ],
        columns=["Item_ID", "Project_ID", "Contract_ID", "Date", "Airframe"],
    )
    flagged_sales = internal_features["Airframe"](INPUT_SALES, name="Airframe")  # type: ignore
    pd.testing.assert_frame_equal(expected_sales, flagged_sales)
def set_log_level(caplog: LogCaptureFixture) -> None:
    """Set the log level.

    Set the log level to DEBUG for our testing to make sure that any bad
    log statements throw errors.
    """
    caplog.set_level(logging.DEBUG)
def test_bad_keyid(
    clirunner,
    known_good_image_local: TypingKnownGoodImage,
    caplog: LogCaptureFixture,
):
    """Test docker-sign can handle invalid keyids."""
    caplog.clear()
    caplog.set_level(logging.DEBUG)

    src = known_good_image_local["image_name"]
    dst = deepcopy(src)
    dst.digest = None
    dst.tag += "_foobar"

    with insecure_registry():
        outcome = clirunner.invoke(
            cli,
            args=[
                "registry",
                "--keyid",
                "invalidkeyid",
                str(src),
                str(dst),
            ],
            input="\n",
        )
        assert outcome.exception
        # Image content verifies; the signing step is what fails.
        assert "Integrity check passed." in caplog.text
        assert "Failed to create signature!" in caplog.text
def test_locker_should_emit_warnings_if_lock_version_is_newer_but_allowed(
    locker: Locker, caplog: LogCaptureFixture
):
    """A lock file written by a newer-minor Poetry is readable but warns."""
    # Build a "next minor" lock-version string (major.minor only).
    next_minor = Version.parse(Locker._VERSION).next_minor().text
    version = ".".join(next_minor.split(".")[:2])
    content = f"""\
[metadata]
lock-version = "{version}"
python-versions = "~2.7 || ^3.4"
content-hash = "c3d07fca33fba542ef2b2a4d75bf5b48d892d21a830e2ad9c952ba5123a52f77"

[metadata.files]
"""
    caplog.set_level(logging.WARNING, logger="poetry.packages.locker")

    locker.lock.write(tomlkit.parse(content))
    _ = locker.lock_data

    assert len(caplog.records) == 1
    record = caplog.records[0]
    assert record.levelname == "WARNING"
    expected = """\
The lock file might not be compatible with the current version of Poetry.
Upgrade Poetry to ensure the lock file is read properly or, alternatively, \
regenerate the lock file with the `poetry lock` command.\
"""
    assert record.message == expected
def test_unauthorized_destination(
    clirunner,
    known_good_image_local: TypingKnownGoodImage,
    gpgsigner: GPGSigner,
    caplog: LogCaptureFixture,
):
    """Test docker-sign can handle incorrect credentials."""
    caplog.clear()
    caplog.set_level(logging.DEBUG)

    src = known_good_image_local["image_name"]
    dst = deepcopy(src)
    dst.digest = None
    dst.tag += "_foobar"

    with insecure_registry(), temporary_gpg_homedir(gpgsigner.homedir):
        outcome = clirunner.invoke(
            cli,
            args=[
                "registry",
                "--keyid",
                gpgsigner.keyid,
                str(known_good_image_local["image_name"]),
                f"{Indices.DOCKERHUB}/dummy:dummy",
            ],
            input=f"{gpgsigner.passphrase}\n",
        )
        assert outcome.exception
        # Source verification succeeds; pushing to Docker Hub is rejected.
        assert "Integrity check passed." in caplog.text
        assert "401" in caplog.text
        assert "Unauthorized" in caplog.text
def test_unauthorized_source(
    clirunner,
    known_good_image_local: TypingKnownGoodImage,
    caplog: LogCaptureFixture,
):
    """Test docker-sign can handle incorrect credentials."""
    caplog.clear()
    caplog.set_level(logging.DEBUG)

    with insecure_registry():
        outcome = clirunner.invoke(
            cli,
            args=[
                "registry",
                "--keyid",
                "invalidkeyid",
                f"{Indices.DOCKERHUB}/dummy:dummy",
                str(known_good_image_local["image_name"]),
            ],
            input="\n",
        )
        assert outcome.exception
        assert "401" in caplog.text
        assert "Unauthorized" in caplog.text
def test_gevent_worker_abort_messages(
    stub_event_backend: EventBackend, caplog: LogCaptureFixture
) -> None:
    """Aborting a gevent-backed task raises Abort in the greenlet and logs it."""
    import gevent

    # capture all messages
    caplog.set_level(logging.NOTSET)

    # Given a middleware with an abortable "thread"
    greenlet = gevent.spawn()
    middleware = Abortable(backend=stub_event_backend)
    middleware.manager.abortables = {"fake_message_id": (1, greenlet)}

    # When the message is aborted
    middleware.manager.abort("fake_message_id")

    # An abort exception is raised in the thread
    assert isinstance(greenlet.exception, Abort)

    # And abort actions are logged
    expected_records = [
        (
            "dramatiq_abort.middleware.Abortable",
            logging.INFO,
            ("Aborting task. Raising exception in worker thread 1."),
        )
    ]
    assert len(caplog.record_tuples) == 1
    assert caplog.record_tuples == expected_records
def test_no_signatures_resign(
    clirunner,
    known_good_image_local: TypingKnownGoodImage,
    gpgsigner: GPGSigner,
    caplog: LogCaptureFixture,
):
    """Test docker-sign can resign images without existing signatures."""
    caplog.clear()
    caplog.set_level(logging.DEBUG)

    src = known_good_image_local["image_name"]
    dst = deepcopy(src)
    dst.digest = None
    dst.tag += "_resigned"

    with insecure_registry(), temporary_gpg_homedir(gpgsigner.homedir):
        outcome = clirunner.invoke(
            cli,
            args=[
                "--signature-type",
                "resign",
                "registry",
                "--keyid",
                gpgsigner.keyid,
                str(src),
                str(dst),
            ],
            input=f"{gpgsigner.passphrase}\n",
        )
        assert not outcome.exception
        assert "Integrity check passed." in caplog.text
        assert "Created new image" in caplog.text
        assert str(dst) in caplog.text
def test_worker_abort_messages(
    raise_thread_exception: mock.Mock,
    stub_event_backend: EventBackend,
    caplog: LogCaptureFixture,
) -> None:
    """Aborting a thread-backed task injects Abort into the thread and logs it."""
    # capture all messages
    caplog.set_level(logging.NOTSET)

    # Given a middleware with an abortable "thread"
    middleware = Abortable(backend=stub_event_backend)
    middleware.manager.abortables = {"fake_message_id": 1}

    # When the message is aborted
    middleware.manager.abort("fake_message_id")

    # An abort exception is raised in the thread
    raise_thread_exception.assert_has_calls([mock.call(1, Abort)])

    # And abort actions are logged
    expected_records = [
        (
            "dramatiq_abort.middleware.Abortable",
            logging.INFO,
            ("Aborting task. Raising exception in worker thread 1."),
        )
    ]
    assert len(caplog.record_tuples) == 1
    assert caplog.record_tuples == expected_records
def test_logging_trace_spans(datadog_tracer: ddtrace.Tracer, caplog: LogCaptureFixture):
    """Test the DatadogLoggingTraceFilter can actually capture trace span data in log output"""
    # Enable log output for this logger for duration of this test
    caplog.set_level(logging.DEBUG, DatadogLoggingTraceFilter._log.name)
    test = f"{inspect.stack()[0][3]}"
    DatadogLoggingTraceFilter.activate()
    with ddtrace.tracer.trace(
        name=f"{test}_operation",
        service=f"{test}_service",
        resource=f"{test}_resource",
        span_type=SpanTypes.TEST,
    ) as span:
        trace_id = span.trace_id
        logger = logging.getLogger(f"{test}_logger")
        test_msg = f"a test message was logged during {test}"
        logger.warning(test_msg)
        # do some busywork inside the span so there is something to trace
        side = 2**5
        squares = [value ** 2 for value in range(side)]
        assert squares[-1] == 961

    assert test_msg in caplog.text, "caplog.text did not seem to capture logging output during test"
    assert f"SPAN#{trace_id}" in caplog.text, "span marker not found in logging output"
    assert f"TRACE#{trace_id}" in caplog.text, "trace marker not found in logging output"
    assert f"resource {test}_resource" in caplog.text, "traced resource not found in logging output"
def test_forecast_model(monkeypatch: MonkeyPatch, caplog: LogCaptureFixture) -> None:
    """forecast_model trains, predicts, logs the model, and cleans up H2O state."""
    caplog.set_level(logging.INFO)
    config_mock = Mock(
        spec=BaseModelConfig,
        DEFAULT_HYPER_PARAMS=BaseModelConfig.DEFAULT_HYPER_PARAMS,
        HYPER_SPACE=BaseModelConfig.HYPER_SPACE,
        OVERRIDE_HYPER_PARAMS={},
    )
    cleanup_mock = Mock()
    series_mock = Mock(spec=TimeSeries)
    # Stub out all H2O interaction; prediction just echoes the mock series.
    monkeypatch.setattr(H2OGradientBoostingModel, "train", lambda _: None)
    monkeypatch.setattr(H2OGradientBoostingModel, "predict", lambda _, __: series_mock)
    monkeypatch.setattr(H2OGradientBoostingModel, "estimator", Mock(key="Test Model"))
    monkeypatch.setattr(forecast, "_cleanup_h2o_model", cleanup_mock)

    prediction = forecast.forecast_model(
        config_mock, series_mock, Mock(spec=DataOutput), optimize_hyperparameters=False
    )

    assert "Forecasting model: Test Model" in caplog.messages
    assert prediction is series_mock
    cleanup_mock.assert_called_once()
def test_generate_from_schema_deprecation_warning(
    tmp_path: Path,
    caplog: LogCaptureFixture,
    minify: bool,
    expect_warning: bool,
) -> None:
    """The deprecation message is emitted exactly when `minify` warrants it."""
    caplog.set_level(logging.INFO)

    generate_from_schema(get_test_case_path("basic"), minify=minify)

    _assert_deprecation_message(caplog, expect_warning)
def test_load_account_data(
    prepare_database: Iterator[None],
    model_config: BaseModelConfig,
    caplog: LogCaptureFixture,
) -> None:
    """Account data loads from the internal database and logs the row count."""
    caplog.set_level(logging.INFO)

    loaded = model_config._data_loader.load_account_data(
        model_config, DUMMY_CLEANED_DATA_RUN_ID
    )

    assert_frame_equal(pd.DataFrame(EXPECTED_DATA), loaded)
    assert "Loaded account data with 2 rows from internal database" in caplog.messages
def test_unauthorized(runner, caplog: LogCaptureFixture):
    """Test docker-verify can handle incorrect credentials."""
    caplog.clear()
    caplog.set_level(logging.DEBUG)
    image = f"{Indices.DOCKERHUB}/dummy:dummy"

    outcome = runner.invoke(cli, args=["registry", image])

    assert isinstance(outcome.exception, SystemExit)
    for fragment in ("401", "Unauthorized", image):
        assert fragment in caplog.text
def test_not_found(runner, caplog: LogCaptureFixture):
    """Test docker-verify can handle incorrect image names."""
    caplog.clear()
    caplog.set_level(logging.DEBUG)
    image = f"{Indices.DOCKERHUB}/library/python:dummy"

    outcome = runner.invoke(cli, args=["registry", image])

    assert isinstance(outcome.exception, SystemExit)
    for fragment in ("404", "Not Found", image):
        assert fragment in caplog.text
def test_setup_auth_idempotent(debug_auth_app: FastAPI, caplog: LogCaptureFixture) -> None:
    """Re-running first-superuser setup is a logged no-op."""
    logger_name = auth_app.__name__
    caplog.set_level(logging.INFO, logger=logger_name)

    AuthRouterBuilder(get_auth_settings()).setup_first_superuser(get_engine())

    # The last record should announce that nothing needed to be created.
    name, level, message = caplog.record_tuples[-1]
    assert (name, level) == (logger_name, logging.INFO)
    assert message == "First superuser already exists."
def test_capture(capsys: CaptureFixture[str], caplog: LogCaptureFixture) -> None:
    """stdout/stderr and log records are all captured by their fixtures."""
    caplog.set_level(logging.DEBUG)
    print('out')
    print('err', file=sys.stderr)
    magic = 'zzyggdr'
    logger.info(magic)

    captured = capsys.readouterr()
    assert captured.out == 'out\n'
    assert captured.err == 'err\n'
    assert magic in caplog.text