Example #1
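Checks that BaseTemplateEngine.print_date renders timestamps with the configured date_format in the configured timezone, converting from UTC, ignoring seconds and microseconds, and treating naive datetimes as UTC.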
def test_print_date():
    # type: () -> None
    settings = Settings()
    settings.date_format = "%Y-%m-%d %I:%M %p"
    setattr(settings, "timezone", "US/Pacific")  # work around mypy confusion
    template_engine = BaseTemplateEngine(settings, "grouper.fe")

    for date_, expected, msg in [
        (datetime(2015, 8, 11, 18, tzinfo=UTC), "2015-08-11 11:00 AM", "from datetime object"),
        (datetime(2015, 8, 11, 18, 0, 10, 10, tzinfo=UTC), "2015-08-11 11:00 AM", "ignore sec/ms"),
        (datetime(2015, 8, 11, 18, 0, 10, 10), "2015-08-11 11:00 AM", "add tzinfo if needed"),
    ]:
        assert template_engine.print_date(date_) == expected, msg
Example #2
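Verifies that the password component of a database URL produced by the database_source program is masked before it reaches the logs, using pytest's caplog fixture; the credentials in the URLs are redacted in this extract, which is why test_url and expected_url read identically.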
def test_mask_passsword_in_logs(caplog):
    # type: (LogCaptureFixture) -> None
    settings = Settings()
    settings._logger.setLevel(logging.DEBUG)
    settings.database_source = "/path/to/program"
    test_url = "mysql://*****:*****@example.com:8888/merou"
    expected_url = "mysql://*****:*****@example.com:8888/merou"

    # Reading settings.database will run the external program and trigger the logging.
    with patch("subprocess.check_output") as mock_subprocess:
        mock_subprocess.return_value = "mysql://*****:*****@example.com:8888/merou"
        assert settings.database == test_url

    assert test_url not in caplog.text
    assert expected_url in caplog.text
Example #3
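A variant of the previous test in which the mocked subprocess.check_output returns bytes instead of a str, matching its return type on Python 3.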
def test_mask_passsword_in_logs(caplog):
    # type: (LogCaptureFixture) -> None
    settings = Settings()
    settings._logger.setLevel(logging.DEBUG)
    settings.database_source = "/path/to/program"
    test_url = "mysql://*****:*****@example.com:8888/merou"
    expected_url = "mysql://*****:*****@example.com:8888/merou"

    # Reading settings.database will run the external program and trigger the logging.
    with patch("subprocess.check_output") as mock_subprocess:
        mock_subprocess.return_value = b"mysql://*****:*****@example.com:8888/merou"
        assert settings.database == test_url

    assert test_url not in caplog.text
    assert expected_url in caplog.text
Example #4
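The same print_date test as Example #1, with the parametrized cases wrapped across lines differently.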
def test_print_date():
    # type: () -> None
    settings = Settings()
    settings.date_format = "%Y-%m-%d %I:%M %p"
    setattr(settings, "timezone", "US/Pacific")  # work around mypy confusion
    template_engine = BaseTemplateEngine(settings, "grouper.fe")

    for date_, expected, msg in [
        (datetime(2015, 8, 11, 18,
                  tzinfo=UTC), "2015-08-11 11:00 AM", "from datetime object"),
        (datetime(2015, 8, 11, 18, 0, 10, 10,
                  tzinfo=UTC), "2015-08-11 11:00 AM", "ignore sec/ms"),
        (datetime(2015, 8, 11, 18, 0, 10,
                  10), "2015-08-11 11:00 AM", "add tzinfo if needed"),
    ]:
        assert template_engine.print_date(date_) == expected, msg
Example #5
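A helper, presumably used as a pytest fixture, that builds the frontend GrouperApplication with debug disabled, XSRF cookies turned off, and a fixed session.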
def fe_app(session, standard_graph, tmpdir):
    # type: (Session, GroupGraph, LocalPath) -> GrouperApplication
    settings = Settings({"debug": False})
    usecase_factory = create_graph_usecase_factory(settings, session, standard_graph)
    return create_fe_application(
        settings, usecase_factory, "", xsrf_cookies=False, session=lambda: session
    )
Example #6
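A pytest fixture that installs global settings and an empty plugin proxy, creates the database schema on a fresh engine, and returns a Session whose cleanup is registered as a finalizer.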
def session(request, tmpdir):
    # type: (FixtureRequest, LocalPath) -> Session
    settings = Settings()
    set_global_settings(settings)

    # Reinitialize plugins in case a previous test configured some.
    set_global_plugin_proxy(PluginProxy([]))

    db_engine = get_db_engine(db_url(tmpdir))

    # Clean up from previous tests if using a persistent database.
    if "MEROU_TEST_DATABASE" in os.environ:
        Model.metadata.drop_all(db_engine)

    # Create the database schema and the corresponding session.
    Model.metadata.create_all(db_engine)
    Session.configure(bind=db_engine)
    session = Session()

    def fin():
        # type: () -> None
        """Explicitly close the session to avoid any dangling transactions."""
        session.close()

    request.addfinalizer(fin)
    return session
Example #7
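A test-harness constructor that wires Settings, an empty PluginProxy, a fresh database, and the repository, service, and use-case factories together.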
    def __init__(self, tmpdir):
        # type: (LocalPath) -> None
        self.settings = Settings()
        self.settings.database = db_url(tmpdir)
        self.plugins = PluginProxy([])

        # Reinitialize the global plugin proxy with an empty set of plugins in case a previous test
        # initialized plugins.  This can go away once a plugin proxy is injected into everything
        # that needs it instead of maintained as a global.
        set_global_plugin_proxy(self.plugins)

        self.initialize_database()
        self.session = SessionFactory(self.settings).create_session()
        self.graph = GroupGraph()
        session_factory = SingletonSessionFactory(self.session)
        self.repository_factory = GraphRepositoryFactory(
            self.settings, self.plugins, session_factory, self.graph)
        self.sql_repository_factory = SQLRepositoryFactory(
            self.settings, self.plugins, session_factory)
        self.service_factory = ServiceFactory(self.settings, self.plugins,
                                              self.repository_factory)
        self.usecase_factory = UseCaseFactory(self.settings, self.plugins,
                                              self.service_factory)
        self._transaction_service = self.service_factory.create_transaction_service()
Example #8
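Checks that assigning a timezone name string to Settings.timezone yields the corresponding pytz timezone object.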
def test_timezone():
    # type: () -> None
    settings = Settings()

    # mypy 0.700 thinks setting timezone to a str is a type mismatch because it doesn't understand
    # the type magic, so work around the type error by using setattr.
    setattr(settings, "timezone", "US/Eastern")
    assert settings.timezone == pytz.timezone("US/Eastern")
Example #9
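A variant of the password-masking test that captures log output through a StringIO stream handler instead of the caplog fixture.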
def test_mask_passsword_in_logs():
    # type: () -> None
    settings = Settings()
    settings.database_source = "/path/to/program"
    test_url = "mysql://*****:*****@example.com:8888/merou"
    expected_url = "mysql://*****:*****@example.com:8888/merou"

    log_output = StringIO()
    log_handler = logging.StreamHandler(log_output)
    settings._logger.addHandler(log_handler)
    settings._logger.setLevel(logging.DEBUG)

    # Reading settings.database will run the external program and trigger the logging.
    with patch("subprocess.check_output") as mock_subprocess:
        mock_subprocess.return_value = "mysql://*****:*****@example.com:8888/merou"
        assert settings.database == test_url

    output = log_output.getvalue()
    assert test_url not in output
    assert expected_url in output
Example #10
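A simpler variant of the test-harness constructor that wires the factories directly, without a plugin proxy.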
    def __init__(self, tmpdir):
        # type: (LocalPath) -> None
        self.session = self.create_session(tmpdir)
        self.graph = GroupGraph()
        self.settings = Settings({"database": db_url(tmpdir)})
        self.repository_factory = GraphRepositoryFactory(
            self.settings, self.session, self.graph)
        self.service_factory = ServiceFactory(self.repository_factory)
        self.usecase_factory = UseCaseFactory(self.service_factory)
        self._transaction_service = self.service_factory.create_transaction_service()
Example #11
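A test-harness constructor that prepares settings, plugins, and the group graph, then initializes and opens the database.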
    def __init__(self, tmpdir):
        # type: (LocalPath) -> None
        self.settings = Settings()
        self.settings.database = db_url(tmpdir)
        self.plugins = PluginProxy([])
        self.graph = GroupGraph()

        # Reinitialize the global plugin proxy with an empty set of plugins in case a previous test
        # initialized plugins.  This can go away once a plugin proxy is injected into everything
        # that needs it instead of maintained as a global.
        set_global_plugin_proxy(self.plugins)

        self.initialize_database()
        self.open_database()
Example #12
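Exercises the Settings.database property: an unconfigured value raises InvalidSettingsError, an explicit URL takes precedence over database_source, and otherwise the database_source program is run (and retried, with the delay disabled) to obtain a URL, raising DatabaseSourceException once retries are exhausted.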
def test_database():
    # type: () -> None
    settings = Settings()

    # The default is uninitialized and should throw an error until we load a configuration.
    with pytest.raises(InvalidSettingsError):
        assert settings.database

    # If database is set, it should be used.
    settings.database = "sqlite:///grouper.sqlite"
    assert settings.database == "sqlite:///grouper.sqlite"
    settings.database_source = "/bin/false"
    assert settings.database == "sqlite:///grouper.sqlite"

    # If only database_source is set, it should be run to get a URL.
    settings.database = ""
    settings.database_source = "/path/to/program"
    with patch("subprocess.check_output") as mock_subprocess:
        mock_subprocess.return_value = "sqlite:///other.sqlite\n"
        assert settings.database == "sqlite:///other.sqlite"
        assert mock_subprocess.call_args_list == [
            call(["/path/to/program"], stderr=subprocess.STDOUT)
        ]

    # If the command fails, it should be retried.  Disable the delay to not make the test slow.
    settings = Settings()
    settings.database_source = "/path/to/program"
    with patch.object(Settings, "DB_URL_RETRY_DELAY", new=0):
        with patch("subprocess.check_output") as mock_subprocess:
            exception = subprocess.CalledProcessError(1, "/path/to/program")
            mock_subprocess.side_effect = [exception, "sqlite:///third.sqlite"]
            assert settings.database == "sqlite:///third.sqlite"
            assert mock_subprocess.call_count == 2

    # Commands that return an empty URL should also be retried.
    settings = Settings()
    settings.database_source = "/path/to/program"
    with patch.object(Settings, "DB_URL_RETRY_DELAY", new=0):
        with patch("subprocess.check_output") as mock_subprocess:
            mock_subprocess.side_effect = ["", "sqlite:///notempty.sqlite"]
            assert settings.database == "sqlite:///notempty.sqlite"
            assert mock_subprocess.call_count == 2

    # Too many retries should raise an exception.
    settings = Settings()
    settings.database_source = "/path/to/program"
    with patch.object(Settings, "DB_URL_RETRY_DELAY", new=0):
        with patch("subprocess.check_output") as mock_subprocess:
            mock_subprocess.return_value = ""
            with pytest.raises(DatabaseSourceException):
                assert settings.database
Example #13
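Derives a new Settings object from the global settings via Settings.from_settings, overriding sleep_interval.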
from grouper.settings import Settings, settings as base_settings

settings = Settings.from_settings(base_settings, {"sleep_interval": 60})
Example #14
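Derives a new Settings object from the global settings via Settings.from_settings, supplying defaults for address, debug, number of processes, port, and refresh interval.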
from grouper.settings import Settings, settings as base_settings

settings = Settings.from_settings(
    base_settings,
    {
        "address": None,
        "debug": False,
        "num_processes": 1,
        "port": 8990,
        "refresh_interval": 60
    },
)
Example #15
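The same test_database test as Example #12, with the mocked subprocess.check_output returning bytes, matching its return type on Python 3.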
def test_database():
    # type: () -> None
    settings = Settings()

    # The default is uninitialized and should throw an error until we load a configuration.
    with pytest.raises(InvalidSettingsError):
        assert settings.database

    # If database is set, it should be used.
    settings.database = "sqlite:///grouper.sqlite"
    assert settings.database == "sqlite:///grouper.sqlite"
    settings.database_source = "/bin/false"
    assert settings.database == "sqlite:///grouper.sqlite"

    # If only database_source is set, it should be run to get a URL.
    settings.database = ""
    settings.database_source = "/path/to/program"
    with patch("subprocess.check_output") as mock_subprocess:
        mock_subprocess.return_value = b"sqlite:///other.sqlite\n"
        assert settings.database == "sqlite:///other.sqlite"
        assert mock_subprocess.call_args_list == [
            call(["/path/to/program"], stderr=subprocess.STDOUT)
        ]

    # If the command fails, it should be retried.  Disable the delay to not make the test slow.
    settings = Settings()
    settings.database_source = "/path/to/program"
    with patch.object(Settings, "DB_URL_RETRY_DELAY", new=0):
        with patch("subprocess.check_output") as mock_subprocess:
            exception = subprocess.CalledProcessError(1, "/path/to/program")
            mock_subprocess.side_effect = [exception, b"sqlite:///third.sqlite"]
            assert settings.database == "sqlite:///third.sqlite"
            assert mock_subprocess.call_count == 2

    # Commands that return an empty URL should also be retried.
    settings = Settings()
    settings.database_source = "/path/to/program"
    with patch.object(Settings, "DB_URL_RETRY_DELAY", new=0):
        with patch("subprocess.check_output") as mock_subprocess:
            mock_subprocess.side_effect = [b"", b"sqlite:///notempty.sqlite"]
            assert settings.database == "sqlite:///notempty.sqlite"
            assert mock_subprocess.call_count == 2

    # Too many retries should raise an exception.
    settings = Settings()
    settings.database_source = "/path/to/program"
    with patch.object(Settings, "DB_URL_RETRY_DELAY", new=0):
        with patch("subprocess.check_output") as mock_subprocess:
            mock_subprocess.return_value = b""
            with pytest.raises(DatabaseSourceException):
                assert settings.database
Example #16
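Tests Settings.update_from_config: loading a specific config section, converting the timezone string to a pytz timezone, ignoring unknown and internal attributes, and raising InvalidSettingsError when neither database nor database_source is set.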
def test_update_from_config(tmpdir):
    # type: (LocalPath) -> None
    settings = Settings()

    # Create a config file that sets database to different values in different sections.
    config_path = str(tmpdir.join("test.yaml"))
    with open(config_path, "w") as config:
        config.write(CONFIG_SECTIONS)

    # Default loading should only see the common section, but another can be specified.
    settings.update_from_config(config_path)
    assert settings.database == "foo"
    settings.update_from_config(config_path, section="other")
    assert settings.database == "bar"

    # The timezone attribute is special and should be converted to a timezone on setting.
    assert settings.timezone == pytz.timezone("UTC")
    with open(config_path, "w") as config:
        config.write("common:\n  timezone: US/Pacific\n")
    settings.update_from_config(config_path)
    assert settings.timezone == pytz.timezone("US/Pacific")

    # Create a config file that tries to set unknown or internal attributes.
    with open(config_path, "w") as config:
        config.write(CONFIG_BOGUS)
    settings.update_from_config(config_path)
    assert settings._logger != "bar"  # type: ignore[comparison-overlap]
    assert not hasattr(settings, "foo")

    # A configuration that doesn't set database or database_source should raise an exception.
    settings = Settings()
    with open(config_path, "w") as config:
        config.write("common:\n  auditors_group: some-group\n")
    with pytest.raises(InvalidSettingsError):
        settings.update_from_config(config_path)
Example #17
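A near-identical variant of Example #16 that omits the type: ignore comment on the _logger assertion.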
def test_update_from_config(tmpdir):
    # type: (LocalPath) -> None
    settings = Settings()

    # Create a config file that sets database to different values in different sections.
    config_path = str(tmpdir.join("test.yaml"))
    with open(config_path, "w") as config:
        config.write(CONFIG_SECTIONS)

    # Default loading should only see the common section, but another can be specified.
    settings.update_from_config(config_path)
    assert settings.database == "foo"
    settings.update_from_config(config_path, section="other")
    assert settings.database == "bar"

    # The timezone attribute is special and should be converted to a timezone on setting.
    assert settings.timezone == pytz.timezone("UTC")
    with open(config_path, "w") as config:
        config.write("common:\n  timezone: US/Pacific\n")
    settings.update_from_config(config_path)
    assert settings.timezone == pytz.timezone("US/Pacific")

    # Create a config file that tries to set unknown or internal attributes.
    with open(config_path, "w") as config:
        config.write(CONFIG_BOGUS)
    settings.update_from_config(config_path)
    assert settings._logger != "bar"
    assert not hasattr(settings, "foo")

    # A configuration that doesn't set database or database_source should raise an exception.
    settings = Settings()
    with open(config_path, "w") as config:
        config.write("common:\n  auditors_group: some-group\n")
    with pytest.raises(InvalidSettingsError):
        settings.update_from_config(config_path)
Example #18
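A helper, presumably used as a pytest fixture, that builds the API GrouperApplication with debug disabled from a session and a prebuilt group graph.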
def api_app(session, standard_graph):
    # type: (Session, GroupGraph) -> GrouperApplication
    settings = Settings({"debug": False})
    usecase_factory = create_graph_usecase_factory(settings, session, standard_graph)
    return create_api_application(standard_graph, settings, usecase_factory)
Example #19
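Tests which actor is recorded in the audit log when a membership edge expires, as the docstring below explains; usernames are redacted in this extract.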
def test_actor_for_edge_expiration(setup):
    # type: (SetupTest) -> None
    """Test choice of actor ID when expiring an edge.

    Our current audit log model has no concept of a system-generated change and has to map every
    change to a user ID that performed that change.  We previously had a bug where we would try to
    grab the first owner of the group and use them as the actor when someone expired out of a
    group, which caused uncaught exceptions if the group somehow ended up in a state with no
    owners.  Test that we do something sane when expiring edges if possible.

    Everything we're testing here is a workaround for a bug.  Once the audit log has been fixed so
    that we can log entries for system actions without attributing them to some user in the system,
    this test and all of the logic it's testing can go away.
    """
    settings = Settings()
    now_minus_one_second = datetime.utcfromtimestamp(int(time() - 1))
    audit_log_service = setup.service_factory.create_audit_log_service()

    # An expiring individual user should be logged with an actor ID of the user.
    with setup.transaction():
        setup.add_user_to_group("*****@*****.**",
                                "some-group",
                                expiration=now_minus_one_second)
    edge = setup.session.query(GroupEdge).filter_by(
        expiration=now_minus_one_second).one()
    notify_edge_expiration(settings, setup.session, edge)
    log_entries = audit_log_service.entries_affecting_user("*****@*****.**", 1)
    assert log_entries
    assert log_entries[0].actor == "*****@*****.**"
    assert log_entries[0].action == "expired_from_group"
    assert log_entries[0].on_user == "*****@*****.**"
    with setup.transaction():
        edge.delete(setup.session)

    # An expiring group should be logged with an actor ID of the owner of the parent group.
    with setup.transaction():
        setup.add_user_to_group("*****@*****.**", "parent-group", role="owner")
        setup.add_user_to_group("*****@*****.**", "child-group", role="owner")
        setup.add_group_to_group("child-group",
                                 "parent-group",
                                 expiration=now_minus_one_second)
    edge = setup.session.query(GroupEdge).filter_by(
        expiration=now_minus_one_second).one()
    notify_edge_expiration(settings, setup.session, edge)
    log_entries = audit_log_service.entries_affecting_group("child-group", 1)
    assert log_entries
    assert log_entries[0].actor == "*****@*****.**"
    assert log_entries[0].action == "expired_from_group"
    assert log_entries[0].on_group == "child-group"
    log_entries = audit_log_service.entries_affecting_group("parent-group", 1)
    assert log_entries
    assert log_entries[0].actor == "*****@*****.**"
    assert log_entries[0].action == "expired_from_group"
    assert log_entries[0].on_group == "parent-group"
    with setup.transaction():
        edge.delete(setup.session)

    # If the parent group has no owner, it should be logged with an actor ID of the owner of the
    # child group.
    with setup.transaction():
        setup.add_user_to_group("*****@*****.**", "a-group", role="owner")
        setup.add_group_to_group("a-group",
                                 "ownerless-group",
                                 expiration=now_minus_one_second)
    edge = setup.session.query(GroupEdge).filter_by(
        expiration=now_minus_one_second).one()
    notify_edge_expiration(settings, setup.session, edge)
    log_entries = audit_log_service.entries_affecting_group("a-group", 1)
    assert log_entries
    assert log_entries[0].actor == "*****@*****.**"
    assert log_entries[0].action == "expired_from_group"
    assert log_entries[0].on_group == "a-group"
    log_entries = audit_log_service.entries_affecting_group(
        "ownerless-group", 1)
    assert log_entries
    assert log_entries[0].actor == "*****@*****.**"
    assert log_entries[0].action == "expired_from_group"
    assert log_entries[0].on_group == "ownerless-group"
    with setup.transaction():
        edge.delete(setup.session)

    # If neither group has an owner, raise an exception.
    with setup.transaction():
        setup.add_group_to_group("other-group",
                                 "ownerless-group",
                                 expiration=now_minus_one_second)
    edge = setup.session.query(GroupEdge).filter_by(
        expiration=now_minus_one_second).one()
    with pytest.raises(UnknownActorDuringExpirationException):
        notify_edge_expiration(settings, setup.session, edge)