def async_server(standard_graph, tmpdir):
    """Start a grouper-fe server behind a grouper-ctl user_proxy and yield its URL.

    Launches both subprocesses, waits until the proxy port accepts connections,
    and yields the proxy base URL.  The subprocesses are always killed on the
    way out, even if startup fails before the yield (previously a failed
    wait_until_accept leaked the children because kill() was only reached
    after the yield).
    """
    proxy_port = _get_unused_port()
    fe_port = _get_unused_port()
    cmds = [
        [
            src_path("bin", "grouper-ctl"),
            "-vvc",
            src_path("config", "dev.yaml"),
            "user_proxy",
            "-P",
            str(fe_port),
            "-p",
            str(proxy_port),
            "*****@*****.**",
        ],
        [
            src_path("bin", "grouper-fe"),
            "-c",
            src_path("config", "dev.yaml"),
            "-p",
            str(fe_port),
            "-d",
            db_url(tmpdir),
        ],
    ]
    subprocesses = []
    try:
        for cmd in cmds:
            print("Starting command: " + " ".join(cmd))
            p = subprocess.Popen(cmd)
            subprocesses.append(p)
        print("Waiting on server to come online")
        wait_until_accept(proxy_port)
        print("Connection established")
        yield "http://localhost:{}".format(proxy_port)
    finally:
        # Guarantee teardown of every child we managed to start.
        for p in subprocesses:
            p.kill()
def api_server(tmpdir):
    # type: (LocalPath) -> Iterator[str]
    """Start a grouper-api server in a subprocess and yield its host:port.

    The server is always killed on the way out, even when
    _wait_until_accept raises before the yield (previously a failed wait
    leaked the child process because kill() was only reached after the
    yield).
    """
    api_port = _get_unused_port()
    cmd = [
        sys.executable,
        src_path("bin", "grouper-api"),
        "-c",
        src_path("config", "dev.yaml"),
        "-p",
        str(api_port),
        "-d",
        db_url(tmpdir),
    ]
    logging.info("Starting server with command: %s", " ".join(cmd))
    p = subprocess.Popen(cmd, env=bin_env())
    try:
        logging.info("Waiting on server to come online")
        _wait_until_accept(api_port)
        logging.info("Connection established")
        yield "localhost:{}".format(api_port)
    finally:
        p.kill()
def api_server(tmpdir):
    # type: (LocalPath) -> Iterator[str]
    """Start a grouper-api server in a subprocess and yield its host:port.

    The listening socket is bound here and handed to the child on its stdin
    (--listen-stdin), which avoids a bind race in the child.  The parent's
    copy of the socket is closed after the fork.  The server is always
    terminated on the way out, even when _wait_until_accept raises before
    the yield (previously a failed wait leaked the child process).
    """
    api_socket = _bind_socket()
    api_port = api_socket.getsockname()[1]
    cmd = [
        sys.executable,
        src_path("bin", "grouper-api"),
        "-vvc",
        src_path("config", "test.yaml"),
        "-d",
        db_url(tmpdir),
        "--listen-stdin",
    ]
    logging.info("Starting server with command: %s", " ".join(cmd))
    p = subprocess.Popen(cmd, env=bin_env(), stdin=api_socket.fileno())
    # The child inherited the fd, so the parent's copy can be closed now.
    api_socket.close()
    try:
        logging.info("Waiting on server to come online")
        _wait_until_accept(api_port)
        logging.info("Connection established")
        yield "localhost:{}".format(api_port)
    finally:
        p.terminate()
def frontend_server(tmpdir, user):
    # type: (LocalPath, str) -> Iterator[str]
    """Start grouper-fe plus a grouper-ctl user_proxy and yield the proxy URL.

    The frontend gets its pre-bound listening socket on stdin
    (--listen-stdin); the proxy re-binds its own port (see TODO below).
    Both children are always terminated on the way out, even when a
    _wait_until_accept raises before the yield (previously a failed wait
    leaked the already-started subprocesses).
    """
    proxy_socket = _bind_socket()
    proxy_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    proxy_port = proxy_socket.getsockname()[1]
    fe_socket = _bind_socket()
    fe_port = fe_socket.getsockname()[1]
    proxy_cmd = [
        sys.executable,
        src_path("bin", "grouper-ctl"),
        "-vvc",
        src_path("config", "test.yaml"),
        "user_proxy",
        "-P",
        str(fe_port),
        "-p",
        str(proxy_port),
        user,
    ]
    fe_cmd = [
        sys.executable,
        src_path("bin", "grouper-fe"),
        "-vvc",
        src_path("config", "test.yaml"),
        "-d",
        db_url(tmpdir),
        "--listen-stdin",
    ]
    subprocesses = []
    try:
        logging.info("Starting command: %s", " ".join(fe_cmd))
        fe_process = subprocess.Popen(fe_cmd, env=bin_env(), stdin=fe_socket.fileno())
        subprocesses.append(fe_process)
        fe_socket.close()
        # TODO(rra): There is a race condition here because grouper-ctl user_proxy doesn't
        # implement --listen-stdin yet, which in turn is because the built-in Python HTTPServer
        # doesn't support wrapping a pre-existing socket.  Since we have to close the socket so
        # that grouper-ctl user_proxy can re-open it, something else might grab it in the
        # interim.  Once it is rewritten using Tornado, it can use the same approach as the
        # frontend and API servers and take an open socket on standard input.  At that point,
        # we can also drop the SO_REUSEADDR above, which is there to protect against the race
        # condition.
        logging.info("Starting command: %s", " ".join(proxy_cmd))
        proxy_socket.close()
        proxy_process = subprocess.Popen(proxy_cmd, env=bin_env())
        subprocesses.append(proxy_process)
        logging.info("Waiting on server to come online")
        _wait_until_accept(fe_port)
        _wait_until_accept(proxy_port)
        logging.info("Connection established")
        yield "http://localhost:{}".format(proxy_port)
    finally:
        # Guarantee teardown of every child we managed to start.
        for p in subprocesses:
            p.terminate()
def async_server(standard_graph, tmpdir):
    """Start a grouper-fe server behind a grouper-ctl user_proxy and yield its URL.

    The subprocesses are always killed on the way out, even when
    wait_until_accept raises before the yield (previously a failed wait
    leaked the already-started children because kill() was only reached
    after the yield).
    """
    proxy_port = _get_unused_port()
    fe_port = _get_unused_port()
    cmds = [
        [
            src_path("bin", "grouper-ctl"),
            "-vvc",
            src_path("config", "dev.yaml"),
            "user_proxy",
            "-P",
            str(fe_port),
            "-p",
            str(proxy_port),
            "*****@*****.**",
        ],
        [
            src_path("bin", "grouper-fe"),
            "-c",
            src_path("config", "dev.yaml"),
            "-p",
            str(fe_port),
            "-d",
            db_url(tmpdir),
        ],
    ]
    subprocesses = []
    try:
        for cmd in cmds:
            p = subprocess.Popen(cmd)
            subprocesses.append(p)
        wait_until_accept(proxy_port)
        yield "http://localhost:{}".format(proxy_port)
    finally:
        for p in subprocesses:
            p.kill()
def api_server(tmpdir):
    # type: (LocalPath) -> Iterator[str]
    """Start a grouper-api server in a subprocess and yield its host:port.

    The listening socket is bound here and handed to the child on its stdin
    (--listen-stdin); the parent's copy is closed after the fork.  The
    server is always killed on the way out, even when _wait_until_accept
    raises before the yield (previously a failed wait leaked the child).
    """
    api_socket = _bind_socket()
    api_port = api_socket.getsockname()[1]
    cmd = [
        sys.executable,
        src_path("bin", "grouper-api"),
        "-vvc",
        src_path("config", "dev.yaml"),
        "-d",
        db_url(tmpdir),
        "--listen-stdin",
    ]
    logging.info("Starting server with command: %s", " ".join(cmd))
    p = subprocess.Popen(cmd, env=bin_env(), stdin=api_socket.fileno())
    # The child inherited the fd, so the parent's copy can be closed now.
    api_socket.close()
    try:
        logging.info("Waiting on server to come online")
        _wait_until_accept(api_port)
        logging.info("Connection established")
        yield "localhost:{}".format(api_port)
    finally:
        p.kill()
def frontend_server(tmpdir, user):
    # type: (LocalPath, str) -> Iterator[str]
    """Start grouper-fe plus a grouper-ctl user_proxy and yield the proxy URL.

    The frontend gets its pre-bound listening socket on stdin
    (--listen-stdin); the proxy re-binds its own port (see TODO below).
    Both children are always killed on the way out, even when a
    _wait_until_accept raises before the yield (previously a failed wait
    leaked the already-started subprocesses).
    """
    proxy_socket = _bind_socket()
    proxy_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    proxy_port = proxy_socket.getsockname()[1]
    fe_socket = _bind_socket()
    fe_port = fe_socket.getsockname()[1]
    proxy_cmd = [
        sys.executable,
        src_path("bin", "grouper-ctl"),
        "-vvc",
        src_path("config", "dev.yaml"),
        "user_proxy",
        "-P",
        str(fe_port),
        "-p",
        str(proxy_port),
        user,
    ]
    fe_cmd = [
        sys.executable,
        src_path("bin", "grouper-fe"),
        "-vvc",
        src_path("config", "dev.yaml"),
        "-d",
        db_url(tmpdir),
        "--listen-stdin",
    ]
    subprocesses = []
    try:
        logging.info("Starting command: %s", " ".join(fe_cmd))
        fe_process = subprocess.Popen(fe_cmd, env=bin_env(), stdin=fe_socket.fileno())
        subprocesses.append(fe_process)
        fe_socket.close()
        # TODO(rra): There is a race condition here because grouper-ctl user_proxy doesn't
        # implement --listen-stdin yet, which in turn is because the built-in Python HTTPServer
        # doesn't support wrapping a pre-existing socket.  Since we have to close the socket so
        # that grouper-ctl user_proxy can re-open it, something else might grab it in the
        # interim.  Once it is rewritten using Tornado, it can use the same approach as the
        # frontend and API servers and take an open socket on standard input.  At that point,
        # we can also drop the SO_REUSEADDR above, which is there to protect against the race
        # condition.
        logging.info("Starting command: %s", " ".join(proxy_cmd))
        proxy_socket.close()
        proxy_process = subprocess.Popen(proxy_cmd, env=bin_env())
        subprocesses.append(proxy_process)
        logging.info("Waiting on server to come online")
        _wait_until_accept(fe_port)
        _wait_until_accept(proxy_port)
        logging.info("Connection established")
        yield "http://localhost:{}".format(proxy_port)
    finally:
        # Guarantee teardown of every child we managed to start.
        for p in subprocesses:
            p.kill()
def test_list_pagination(tmpdir, setup, browser):
    # type: (LocalPath, SetupTest, Chrome) -> None
    """Test pagination.

    This forces the pagination to specific values, rather than using the page controls, since we
    don't create more than 100 permissions for testing.
    """
    permissions = create_test_data(setup)
    settings = FrontendSettings()
    settings.update_from_config(src_path("config", "dev.yaml"))
    expected_permissions = [
        (p.name, p.description, format_date(settings, p.created_on)) for p in permissions
    ]

    def rows_of(permissions_page):
        # Collapse the rendered table into comparable (name, description, created_on) tuples.
        return [
            (row.name, row.description, row.created_on)
            for row in permissions_page.permission_rows
        ]

    with frontend_server(tmpdir, "*****@*****.**") as frontend_url:
        # A limit of one starting at offset one should show only the second permission.
        browser.get(url(frontend_url, "/permissions?limit=1&offset=1"))
        page = PermissionsPage(browser)
        assert rows_of(page) == sorted(expected_permissions)[1:2]
        assert page.limit_label == "Limit: 1"

        # Retrieve the last permission but with a larger limit to test that the limit isn't capped
        # to the number of returned items.
        browser.get(url(frontend_url, "/permissions?limit=10&offset=2"))
        page = PermissionsPage(browser)
        assert rows_of(page) == sorted(expected_permissions)[2:]
        assert page.limit_label == "Limit: 10"
def call_main(session, tmpdir, *args):
    # type: (Session, LocalPath, *str) -> None
    """Legacy test driver, use run_ctl instead for all new code."""
    argv = ["grouper-ctl", "-c", src_path("config", "test.yaml"), "-d", db_url(tmpdir)]
    argv.extend(args)
    main(sys_argv=argv, session=session)
def test_list_pagination(tmpdir, setup, browser):
    # type: (LocalPath, SetupTest, Chrome) -> None
    """Test pagination.

    This forces the pagination to specific values, rather than using the page controls, since we
    don't create more than 100 permissions for testing.
    """
    permissions = create_test_data(setup)
    settings = FrontendSettings()
    settings.update_from_config(src_path("config", "dev.yaml"))
    expected_permissions = [
        (p.name, p.description, format_date(settings, p.created_on)) for p in permissions
    ]

    def rows_of(permissions_page):
        # Collapse the rendered table into comparable (name, description, created_on) tuples.
        return [
            (row.name, row.description, row.created_on)
            for row in permissions_page.permission_rows
        ]

    with frontend_server(tmpdir, "*****@*****.**") as frontend_url:
        # A limit of one starting at offset one should show only the second permission.
        browser.get(url(frontend_url, "/permissions?limit=1&offset=1"))
        page = PermissionsPage(browser)
        assert rows_of(page) == sorted(expected_permissions)[1:2]
        assert page.limit_label == "Limit: 1"

        # Retrieve the last permission but with a larger limit to test that the limit isn't capped
        # to the number of returned items.
        browser.get(url(frontend_url, "/permissions?limit=10&offset=2"))
        page = PermissionsPage(browser)
        assert rows_of(page) == sorted(expected_permissions)[2:]
        assert page.limit_label == "Limit: 10"
def frontend_server(tmpdir, user):
    # type: (LocalPath, str) -> Iterator[str]
    """Start grouper-fe plus a grouper-ctl user_proxy and yield the proxy URL.

    The subprocesses are always killed on the way out, even when a
    _wait_until_accept raises before the yield (previously a failed wait
    leaked the already-started children because kill() was only reached
    after the yield).
    """
    proxy_port = _get_unused_port()
    fe_port = _get_unused_port()
    cmds = [
        [
            sys.executable,
            src_path("bin", "grouper-ctl"),
            "-vvc",
            src_path("config", "dev.yaml"),
            "user_proxy",
            "-P",
            str(fe_port),
            "-p",
            str(proxy_port),
            user,
        ],
        [
            sys.executable,
            src_path("bin", "grouper-fe"),
            "-vvc",
            src_path("config", "dev.yaml"),
            "-p",
            str(fe_port),
            "-d",
            db_url(tmpdir),
        ],
    ]
    subprocesses = []
    try:
        for cmd in cmds:
            logging.info("Starting command: %s", " ".join(cmd))
            p = subprocess.Popen(cmd, env=bin_env())
            subprocesses.append(p)
        logging.info("Waiting on server to come online")
        _wait_until_accept(proxy_port)
        _wait_until_accept(fe_port)
        logging.info("Connection established")
        yield "http://localhost:{}".format(proxy_port)
    finally:
        # Guarantee teardown of every child we managed to start.
        for p in subprocesses:
            p.kill()
def run_ctl(setup, *args):
    # type: (SetupTest, *str) -> None
    """Invoke grouper-ctl's main() against the test database with the given arguments."""
    base_argv = [
        "grouper-ctl",
        "-c",
        src_path("config", "dev.yaml"),
        "-d",
        setup.settings.database,
    ]
    main(sys_argv=base_argv + list(args), session=setup.session)
def async_api_server(standard_graph, tmpdir):
    """Start a grouper-api server in a subprocess and yield its host:port.

    The server is always killed on the way out, even when
    wait_until_accept raises before the yield (previously a failed wait
    leaked the child process because kill() was only reached after the
    yield).
    """
    api_port = _get_unused_port()
    cmd = [
        src_path("bin", "grouper-api"),
        "-c",
        src_path("config", "dev.yaml"),
        "-p",
        str(api_port),
        "-d",
        db_url(tmpdir),
    ]
    p = subprocess.Popen(cmd)
    try:
        wait_until_accept(api_port)
        yield "localhost:{}".format(api_port)
    finally:
        p.kill()
def async_api_server(standard_graph, tmpdir):
    """Start a grouper-api server in a subprocess and yield its host:port.

    The server is always killed on the way out, even when
    wait_until_accept raises before the yield (previously a failed wait
    leaked the child process because kill() was only reached after the
    yield).
    """
    api_port = _get_unused_port()
    cmd = [
        src_path("bin", "grouper-api"),
        "-c",
        src_path("config", "dev.yaml"),
        "-p",
        str(api_port),
        "-d",
        db_url(tmpdir),
    ]
    print("Starting server with command: " + " ".join(cmd))
    p = subprocess.Popen(cmd, env=bin_env())
    try:
        print("Waiting on server to come online")
        wait_until_accept(api_port)
        print("Connection established")
        yield "localhost:{}".format(api_port)
    finally:
        p.kill()
def test_list(tmpdir, setup, browser):
    # type: (LocalPath, SetupTest, Chrome) -> None
    """Exercise the permission list page: full list, audited filter, and date sorting."""
    permissions = create_test_data(setup)
    settings = FrontendSettings()
    settings.update_from_config(src_path("config", "dev.yaml"))
    expected_permissions = [
        (p.name, p.description, format_date(settings, p.created_on)) for p in permissions
    ]

    def rows_of(permissions_page):
        # Collapse the rendered table into comparable (name, description, created_on) tuples.
        return [
            (row.name, row.description, row.created_on)
            for row in permissions_page.permission_rows
        ]

    with frontend_server(tmpdir, "*****@*****.**") as frontend_url:
        browser.get(url(frontend_url, "/permissions"))

        # Check the basic permission list.
        page = PermissionsPage(browser)
        assert rows_of(page) == sorted(expected_permissions)
        assert page.heading == "Permissions"
        assert page.subheading == "{} permission(s)".format(len(expected_permissions))
        assert page.limit_label == "Limit: 100"

        # Switch to only audited permissions.
        page.click_show_audited_button()
        audited = [p for p in expected_permissions if p[0] == "audited-permission"]
        assert rows_of(page) == sorted(audited)
        assert page.heading == "Audited Permissions"
        assert page.subheading == "{} permission(s)".format(len(audited))

        # Switch back to all permissions and sort by date.
        page.click_show_all_button()
        page.click_sort_by_date()
        expected_permissions_sorted_by_time = [
            (p.name, p.description, format_date(settings, p.created_on))
            for p in sorted(permissions, key=lambda p: p.created_on, reverse=True)
        ]
        assert rows_of(page) == expected_permissions_sorted_by_time

        # Reverse the sort order.
        page.click_sort_by_date()
        assert rows_of(page) == list(reversed(expected_permissions_sorted_by_time))
def test_list(tmpdir, setup, browser):
    # type: (LocalPath, SetupTest, Chrome) -> None
    """Exercise the permission list page: full list, audited filter, and date sorting."""
    permissions = create_test_data(setup)
    settings = FrontendSettings()
    settings.update_from_config(src_path("config", "dev.yaml"))
    expected_permissions = [
        (p.name, p.description, format_date(settings, p.created_on)) for p in permissions
    ]

    def rows_of(permissions_page):
        # Collapse the rendered table into comparable (name, description, created_on) tuples.
        return [
            (row.name, row.description, row.created_on)
            for row in permissions_page.permission_rows
        ]

    with frontend_server(tmpdir, "*****@*****.**") as frontend_url:
        browser.get(url(frontend_url, "/permissions"))

        # Check the basic permission list.
        page = PermissionsPage(browser)
        assert rows_of(page) == sorted(expected_permissions)
        assert page.heading == "Permissions"
        assert page.subheading == "{} permission(s)".format(len(expected_permissions))
        assert page.limit_label == "Limit: 100"

        # Switch to only audited permissions.
        page.click_show_audited_button()
        audited = [p for p in expected_permissions if p[0] == "audited-permission"]
        assert rows_of(page) == sorted(audited)
        assert page.heading == "Audited Permissions"
        assert page.subheading == "{} permission(s)".format(len(audited))

        # Switch back to all permissions and sort by date.
        page.click_show_all_button()
        page.click_sort_by_date()
        expected_permissions_sorted_by_time = [
            (p.name, p.description, format_date(settings, p.created_on))
            for p in sorted(permissions, key=lambda p: p.created_on, reverse=True)
        ]
        assert rows_of(page) == expected_permissions_sorted_by_time

        # Reverse the sort order.
        page.click_sort_by_date()
        assert rows_of(page) == list(reversed(expected_permissions_sorted_by_time))
def test_api():
    # type: () -> None
    """Smoke test: grouper-api --help runs and prints its usage banner.

    subprocess.check_output returns bytes, so the output must be decoded
    before comparing against a str prefix (the previous str-prefix
    startswith on bytes raises TypeError on Python 3).  The script is run
    via sys.executable, matching test_fe, so it works regardless of the
    file's executable bit or shebang.
    """
    bin_path = src_path("bin", "grouper-api")
    out = subprocess.check_output([sys.executable, bin_path, "--help"], env=bin_env())
    assert out.decode().startswith("usage: grouper-api")
def test_fe():
    # type: () -> None
    """Smoke test: grouper-fe --help runs and prints its usage banner."""
    fe_script = src_path("bin", "grouper-fe")
    output = subprocess.check_output([sys.executable, fe_script, "--help"], env=bin_env())
    assert output.decode().startswith("usage: grouper-fe")
def test_fe():
    # type: () -> None
    """Smoke test: grouper-fe --help runs and prints its usage banner."""
    fe_script = src_path("bin", "grouper-fe")
    output = subprocess.check_output([sys.executable, fe_script, "--help"], env=bin_env())
    assert output.decode().startswith("usage: grouper-fe")