# Imports assumed from the MLflow codebase these tests belong to; exact module
# paths may differ across MLflow versions.
import os
from unittest import mock

import pytest

import mlflow
from mlflow.utils import autologging_utils
from mlflow.utils.autologging_utils import (
    _AUTOLOGGING_TEST_MODE_ENV_VAR,
    exception_safe_function,
    is_testing,
    safe_patch,
    try_mlflow_log,
)
from mlflow.utils.autologging_utils.safety import _validate_autologging_run


def test_is_testing_respects_environment_variable():
    try:
        prev_env_var_value = os.environ.pop("MLFLOW_AUTOLOGGING_TESTING", None)
        assert not is_testing()

        os.environ["MLFLOW_AUTOLOGGING_TESTING"] = "false"
        assert not is_testing()

        os.environ["MLFLOW_AUTOLOGGING_TESTING"] = "true"
        assert is_testing()
    finally:
        # Compare against None: a previously-set falsy value such as "" or
        # "false" must still be restored, not deleted
        if prev_env_var_value is not None:
            os.environ["MLFLOW_AUTOLOGGING_TESTING"] = prev_env_var_value
        else:
            del os.environ["MLFLOW_AUTOLOGGING_TESTING"]
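
# For comparison, a minimal sketch of the same test using pytest's built-in
# monkeypatch fixture, which snapshots and restores the environment
# automatically (assuming is_testing() re-reads MLFLOW_AUTOLOGGING_TESTING on
# each call). Hypothetical variant, not part of the original suite.
def test_is_testing_respects_environment_variable_monkeypatch(monkeypatch):
    monkeypatch.delenv("MLFLOW_AUTOLOGGING_TESTING", raising=False)
    assert not is_testing()

    monkeypatch.setenv("MLFLOW_AUTOLOGGING_TESTING", "false")
    assert not is_testing()

    monkeypatch.setenv("MLFLOW_AUTOLOGGING_TESTING", "true")
    assert is_testing()
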
def test_safe_patch_validates_autologging_runs_when_necessary_in_test_mode(
    patch_destination, test_autologging_integration
):
    assert autologging_utils.is_testing()

    def no_tag_run_patch_impl(original, *args, **kwargs):
        with mlflow.start_run(nested=True):
            return original(*args, **kwargs)

    safe_patch(test_autologging_integration, patch_destination, "fn", no_tag_run_patch_impl)

    with mock.patch(
        "mlflow.utils.autologging_utils.safety._validate_autologging_run",
        wraps=_validate_autologging_run,
    ) as validate_run_mock:

        with pytest.raises(
            AssertionError, match="failed to set autologging tag with expected value"
        ):
            patch_destination.fn()

        # This assertion must sit outside the pytest.raises block; code placed
        # after the raising call would never execute inside it
        assert validate_run_mock.call_count == 1

        validate_run_mock.reset_mock()

        with mlflow.start_run(nested=True):
            # If a user-generated run existed prior to the autologged training session, we expect
            # that safe patch will not attempt to validate it
            patch_destination.fn()
            assert not validate_run_mock.called
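
# For reference, a simplified sketch of the validation exercised above. The
# real _validate_autologging_run lives in mlflow.utils.autologging_utils.safety;
# the "mlflow.autologging" tag name and the client calls below are assumptions
# made for illustration, not MLflow's exact implementation.
def _sketch_validate_autologging_run(integration_name, run_id):
    from mlflow.tracking import MlflowClient

    run = MlflowClient().get_run(run_id)
    autologging_tag_value = run.data.tags.get("mlflow.autologging")
    assert (
        autologging_tag_value == integration_name
    ), "failed to set autologging tag with expected value"
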
def test_try_mlflow_log_propagates_exceptions_in_test_mode():
    assert autologging_utils.is_testing()

    def throwing_function():
        raise Exception("bad implementation")

    with pytest.raises(Exception, match="bad implementation"):
        try_mlflow_log(throwing_function)
def test_try_mlflow_log_emits_exceptions_as_warnings_in_standard_mode():
    assert not autologging_utils.is_testing()

    def throwing_function():
        raise Exception("bad implementation")

    with pytest.warns(UserWarning, match="bad implementation"):
        try_mlflow_log(throwing_function)
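
# A plausible sketch of the helper the two tests above exercise: exceptions
# propagate in test mode so bugs fail loudly, and are downgraded to a
# UserWarning in standard mode. Hypothetical implementation, shown only to
# clarify the tested contract.
import warnings


def _sketch_try_mlflow_log(fn, *args, **kwargs):
    try:
        return fn(*args, **kwargs)
    except Exception as e:
        if is_testing():
            raise
        warnings.warn("Logging to MLflow failed: {}".format(e), stacklevel=2)
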
# The yield below marks this as a fixture, not a test; without the decorator,
# pytest would collect it as a (broken) yield test
@pytest.fixture
def test_mode_on():
    try:
        prev_env_var_value = os.environ.pop(_AUTOLOGGING_TEST_MODE_ENV_VAR, None)
        os.environ[_AUTOLOGGING_TEST_MODE_ENV_VAR] = "true"
        assert is_testing()
        yield
    finally:
        if prev_env_var_value is not None:
            os.environ[_AUTOLOGGING_TEST_MODE_ENV_VAR] = prev_env_var_value
        else:
            del os.environ[_AUTOLOGGING_TEST_MODE_ENV_VAR]
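
# Usage sketch: a test opts into autologging test mode simply by requesting
# the fixture by name. Hypothetical example, shown for illustration.
def test_example_behavior_in_test_mode(test_mode_on):
    assert is_testing()
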
def test_safe_patch_does_not_throw_when_autologging_runs_are_leaked_in_standard_mode(
    patch_destination, test_autologging_integration
):
    assert not autologging_utils.is_testing()

    def leak_run_patch_impl(original, *args, **kwargs):
        mlflow.start_run(nested=True)

    safe_patch(test_autologging_integration, patch_destination, "fn", leak_run_patch_impl)
    patch_destination.fn()
    assert mlflow.active_run()

    # End the leaked run
    mlflow.end_run()

    assert not mlflow.active_run()
def test_exception_safe_class_exhibits_expected_behavior_in_test_mode(baseclass, metaclass):
    assert autologging_utils.is_testing()

    class NonThrowingClass(baseclass, metaclass=metaclass):
        def function(self):
            return 10

    assert NonThrowingClass().function() == 10

    exc_to_throw = Exception("function error")

    class ThrowingClass(baseclass, metaclass=metaclass):
        def function(self):
            raise exc_to_throw

    with pytest.raises(Exception) as exc:
        ThrowingClass().function()

    assert exc.value == exc_to_throw
def test_exception_safe_function_exhibits_expected_behavior_in_test_mode():
    assert autologging_utils.is_testing()

    @exception_safe_function
    def non_throwing_function():
        return 10

    assert non_throwing_function() == 10

    exc_to_throw = Exception("function error")

    @exception_safe_function
    def throwing_function():
        raise exc_to_throw

    with pytest.raises(Exception) as exc:
        throwing_function()

    assert exc.value == exc_to_throw
def test_exception_safe_function_exhibits_expected_behavior_in_standard_mode():
    assert not autologging_utils.is_testing()

    @exception_safe_function
    def non_throwing_function():
        return 10

    assert non_throwing_function() == 10

    exc_to_throw = Exception("bad implementation")

    @exception_safe_function
    def throwing_function():
        raise exc_to_throw

    with mock.patch("mlflow.utils.autologging_utils._logger.warning") as logger_mock:
        throwing_function()
        assert logger_mock.call_count == 1
        message, formatting_arg = logger_mock.call_args[0]
        assert "unexpected error during autologging" in message
        assert formatting_arg == exc_to_throw
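
# A minimal sketch of a decorator consistent with the two tests above: it
# re-raises in test mode and logs a warning otherwise, passing the exception
# object as the formatting argument the standard-mode test inspects. The real
# decorator is mlflow.utils.autologging_utils.exception_safe_function; this
# variant is illustrative only.
import functools
import logging

_sketch_logger = logging.getLogger(__name__)


def _sketch_exception_safe_function(function):
    @functools.wraps(function)
    def safe_function(*args, **kwargs):
        try:
            return function(*args, **kwargs)
        except Exception as e:
            if is_testing():
                raise
            _sketch_logger.warning(
                "Encountered unexpected error during autologging: %s", e
            )

    return safe_function
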
def test_exception_safe_class_exhibits_expected_behavior_in_standard_mode(baseclass, metaclass):
    assert not autologging_utils.is_testing()

    class NonThrowingClass(baseclass, metaclass=metaclass):
        def function(self):
            return 10

    assert NonThrowingClass().function() == 10

    exc_to_throw = Exception("function error")

    class ThrowingClass(baseclass, metaclass=metaclass):
        def function(self):
            raise exc_to_throw

    with mock.patch("mlflow.utils.autologging_utils._logger.warning") as logger_mock:
        ThrowingClass().function()

        assert logger_mock.call_count == 1

        message, formatting_arg = logger_mock.call_args[0]
        assert "unexpected error during autologging" in message
        assert formatting_arg == exc_to_throw
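
# Sketch of an exception-safe metaclass matching the class tests: at
# class-creation time it wraps every public callable defined on the class with
# the safe wrapper sketched above. Simplified illustration; MLflow's actual
# exception-safe metaclass handles more cases.
class _SketchExceptionSafeMetaclass(type):
    def __new__(mcs, name, bases, namespace):
        for attr_name, attr_value in list(namespace.items()):
            if callable(attr_value) and not attr_name.startswith("__"):
                namespace[attr_name] = _sketch_exception_safe_function(attr_value)
        return super().__new__(mcs, name, bases, namespace)
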
def test_safe_patch_throws_when_autologging_runs_are_leaked_in_test_mode(
    patch_destination, test_autologging_integration
):
    assert autologging_utils.is_testing()

    def leak_run_patch_impl(original, *args, **kwargs):
        mlflow.start_run(nested=True)

    safe_patch(test_autologging_integration, patch_destination, "fn", leak_run_patch_impl)
    with pytest.raises(AssertionError, match="leaked an active run"):
        patch_destination.fn()

    # End the leaked run
    mlflow.end_run()

    with mlflow.start_run():
        # If a user-generated run existed prior to the autologged training session, we expect
        # that safe patch will not throw a leaked run exception
        patch_destination.fn()
        # End the leaked nested run
        mlflow.end_run()

    assert not mlflow.active_run()
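
# For context, a simplified sketch of how the leak check behind these tests
# could work: record whether a run was already active before invoking the
# patched function, and in test mode assert that the call did not leave a new
# run active. Illustrative only; not MLflow's actual safe_patch internals.
def _sketch_assert_no_leaked_run(invoke_patched_fn):
    preexisting_run = mlflow.active_run()
    result = invoke_patched_fn()
    if is_testing() and preexisting_run is None:
        assert (
            mlflow.active_run() is None
        ), "Autologging integration leaked an active run"
    return result
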
def test_safe_patch_does_not_validate_autologging_runs_in_standard_mode(
    patch_destination, test_autologging_integration
):
    assert not autologging_utils.is_testing()

    def no_tag_run_patch_impl(original, *args, **kwargs):
        with mlflow.start_run(nested=True):
            return original(*args, **kwargs)

    safe_patch(test_autologging_integration, patch_destination, "fn", no_tag_run_patch_impl)

    with mock.patch(
        "mlflow.utils.autologging_utils.safety._validate_autologging_run",
        wraps=_validate_autologging_run,
    ) as validate_run_mock:

        patch_destination.fn()

        with mlflow.start_run(nested=True):
            # If a user-generated run existed prior to the autologged training session, we expect
            # that safe patch will not attempt to validate it
            patch_destination.fn()

        assert not validate_run_mock.called