Example #1
0
def async_server(standard_graph, tmpdir):
    """Launch a user proxy plus frontend server pair and yield the proxy URL.

    Both child processes are killed once the consuming test finishes with
    the yielded URL.
    """
    proxy_port = _get_unused_port()
    fe_port = _get_unused_port()

    proxy_cmd = [
        src_path("bin", "grouper-ctl"),
        "-vvc",
        src_path("config", "dev.yaml"),
        "user_proxy",
        "-P",
        str(fe_port),
        "-p",
        str(proxy_port),
        "*****@*****.**",
    ]
    fe_cmd = [
        src_path("bin", "grouper-fe"),
        "-c",
        src_path("config", "dev.yaml"),
        "-p",
        str(fe_port),
        "-d",
        db_url(tmpdir),
    ]

    # Start the proxy first, then the frontend, mirroring the command order.
    subprocesses = [subprocess.Popen(cmd) for cmd in (proxy_cmd, fe_cmd)]

    wait_until_accept(proxy_port)

    yield "http://localhost:{}".format(proxy_port)

    for p in subprocesses:
        p.kill()
Example #2
0
def session(request, tmpdir):
    # type: (FixtureRequest, LocalPath) -> Session
    """Create and return a database session bound to a fresh per-test schema.

    Fix: the original type comment declared a ``None`` return, but the
    function clearly returns the session object it creates.

    A pytest finalizer closes the session at teardown so no transaction is
    left dangling between tests.
    """
    settings = Settings()
    set_global_settings(settings)

    # Reinitialize plugins in case a previous test configured some.
    set_global_plugin_proxy(PluginProxy([]))

    db_engine = get_db_engine(db_url(tmpdir))

    # Clean up from previous tests if using a persistent database.
    if "MEROU_TEST_DATABASE" in os.environ:
        Model.metadata.drop_all(db_engine)

    # Create the database schema and the corresponding session.
    Model.metadata.create_all(db_engine)
    Session.configure(bind=db_engine)
    session = Session()

    def fin():
        # type: () -> None
        """Explicitly close the session to avoid any dangling transactions."""
        session.close()

    request.addfinalizer(fin)
    return session
Example #3
0
def api_server(tmpdir):
    # type: (LocalPath) -> Iterator[str]
    """Spawn grouper-api on an ephemeral port and yield its host:port.

    The listening socket is handed to the child on stdin (``--listen-stdin``)
    and closed in the parent; the child is killed after the test finishes.
    """
    listener = _bind_socket()
    port = listener.getsockname()[1]

    command = [
        sys.executable,
        src_path("bin", "grouper-api"),
        "-vvc",
        src_path("config", "dev.yaml"),
        "-d",
        db_url(tmpdir),
        "--listen-stdin",
    ]

    logging.info("Starting server with command: %s", " ".join(command))
    server = subprocess.Popen(command, env=bin_env(), stdin=listener.fileno())
    listener.close()

    logging.info("Waiting on server to come online")
    _wait_until_accept(port)
    logging.info("Connection established")

    yield "localhost:{}".format(port)

    server.kill()
Example #4
0
def frontend_server(tmpdir, user):
    # type: (LocalPath, str) -> Iterator[str]
    """Start a frontend server behind a user proxy and yield the proxy URL.

    Two subprocesses are launched: grouper-fe (receiving a pre-bound socket
    on stdin via --listen-stdin) and grouper-ctl user_proxy (which re-binds
    its own port; see the race-condition note below).  Both are killed once
    the test using the yielded URL completes.
    """
    proxy_socket = _bind_socket()
    # SO_REUSEADDR lets user_proxy re-bind the port we release below.
    proxy_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    proxy_port = proxy_socket.getsockname()[1]
    fe_socket = _bind_socket()
    fe_port = fe_socket.getsockname()[1]

    proxy_cmd = [
        sys.executable,
        src_path("bin", "grouper-ctl"),
        "-vvc",
        src_path("config", "dev.yaml"),
        "user_proxy",
        "-P",
        str(fe_port),
        "-p",
        str(proxy_port),
        user,
    ]
    fe_cmd = [
        sys.executable,
        src_path("bin", "grouper-fe"),
        "-vvc",
        src_path("config", "dev.yaml"),
        "-d",
        db_url(tmpdir),
        "--listen-stdin",
    ]

    subprocesses = []

    logging.info("Starting command: %s", " ".join(fe_cmd))
    # The frontend inherits its already-bound listening socket as stdin, so
    # the parent's copy can be closed immediately after the fork.
    fe_process = subprocess.Popen(fe_cmd, env=bin_env(), stdin=fe_socket.fileno())
    subprocesses.append(fe_process)
    fe_socket.close()

    # TODO(rra): There is a race condition here because grouper-ctl user_proxy doesn't implement
    # --listen-stdin yet, which in turn is because the built-in Python HTTPServer doesn't support
    # wrapping a pre-existing socket.  Since we have to close the socket so that grouper-ctl
    # user_proxy can re-open it, something else might grab it in the interim.  Once it is rewritten
    # using Tornado, it can use the same approach as the frontend and API servers and take an open
    # socket on standard input.  At that point, we can also drop the SO_REUSEADDR above, which is
    # there to protect against the race condition.
    logging.info("Starting command: %s", " ".join(proxy_cmd))
    proxy_socket.close()
    proxy_process = subprocess.Popen(proxy_cmd, env=bin_env())
    subprocesses.append(proxy_process)

    logging.info("Waiting on server to come online")
    _wait_until_accept(fe_port)
    _wait_until_accept(proxy_port)
    logging.info("Connection established")

    yield "http://localhost:{}".format(proxy_port)

    for p in subprocesses:
        p.kill()
Example #5
0
 def __init__(self, tmpdir):
     # type: (LocalPath) -> None
     """Wire up the session, graph, settings, and factory chain for a test."""
     self.session = self.create_session(tmpdir)
     self.graph = GroupGraph()
     self.settings = Settings({"database": db_url(tmpdir)})
     repositories = GraphRepositoryFactory(self.settings, self.session, self.graph)
     self.repository_factory = repositories
     self.service_factory = ServiceFactory(repositories)
     self.usecase_factory = UseCaseFactory(self.service_factory)
     self._transaction_service = self.service_factory.create_transaction_service()
Example #6
0
    def create_session(self, tmpdir):
        # type: (LocalPath) -> Session
        """Build the schema in the per-test database and return a bound session."""
        engine = get_db_engine(db_url(tmpdir))

        # A persistent database may still hold tables from a previous run,
        # so wipe them before recreating the schema.
        if "MEROU_TEST_DATABASE" in os.environ:
            Model.metadata.drop_all(engine)

        Model.metadata.create_all(engine)
        Session.configure(bind=engine)
        return Session()
Example #7
0
    def __init__(self, tmpdir):
        # type: (LocalPath) -> None
        """Prepare settings, plugins, graph, and database for a test run."""
        self.settings = Settings()
        self.settings.database = db_url(tmpdir)
        self.plugins = PluginProxy([])
        self.graph = GroupGraph()

        # A previous test may have installed plugins globally, so reset the
        # global proxy to an empty one.  This workaround can be removed once
        # a plugin proxy is injected everywhere instead of kept as a global.
        set_global_plugin_proxy(self.plugins)

        self.initialize_database()
        self.open_database()
Example #8
0
def session(request, tmpdir):
    """Return a session bound to a freshly created per-test schema.

    Teardown closes the session and, when a persistent database is in use,
    drops the schema so the next test starts clean.
    """
    engine = get_db_engine(db_url(tmpdir))

    # Build the schema and bind the session factory to this engine.
    Model.metadata.create_all(engine)
    Session.configure(bind=engine)
    db_session = Session()

    def teardown():
        # type: () -> None
        """Explicitly close the session and clean up if using a persistent database."""
        db_session.close()
        if "MEROU_TEST_DATABASE" in os.environ:
            Model.metadata.drop_all(engine)

    request.addfinalizer(teardown)
    return db_session
Example #9
0
    def __init__(self, tmpdir):
        # type: (LocalPath) -> None
        """Assemble settings, plugin proxy, database, graph, and factories."""
        self.settings = Settings()
        self.settings.database = db_url(tmpdir)
        self.plugin_proxy = PluginProxy([])

        # Clear any plugins left behind globally by a previous test.  This
        # can be dropped once the plugin proxy is injected into everything
        # that needs it instead of being maintained as a global.
        set_global_plugin_proxy(self.plugin_proxy)

        self.initialize_database()
        self.session = SessionFactory(self.settings).create_session()
        self.graph = GroupGraph()
        session_factory = SingletonSessionFactory(self.session)
        self.repository_factory = GraphRepositoryFactory(
            self.settings, self.plugin_proxy, session_factory, self.graph
        )
        self.service_factory = ServiceFactory(self.repository_factory)
        self.usecase_factory = UseCaseFactory(self.settings, self.service_factory)
        self._transaction_service = self.service_factory.create_transaction_service()
Example #10
0
def async_api_server(standard_graph, tmpdir):
    """Run grouper-api on a free port, yield its host:port, kill it afterwards."""
    port = _get_unused_port()

    command = [
        src_path("bin", "grouper-api"),
        "-c",
        src_path("config", "dev.yaml"),
        "-p",
        str(port),
        "-d",
        db_url(tmpdir),
    ]

    server = subprocess.Popen(command)
    wait_until_accept(port)

    yield "localhost:{}".format(port)

    server.kill()