def session(request, tmpdir):
    # type: (FixtureRequest, LocalPath) -> Session
    """Pytest fixture providing a SQLAlchemy session bound to a fresh test database.

    Resets the global settings and global plugin proxy so state from a previous
    test cannot leak in, (re)creates the schema, and registers a finalizer that
    closes the session when the test finishes.

    NOTE(review): the original type comment claimed ``-> None`` but the fixture
    returns the session; corrected to ``-> Session``.
    """
    settings = Settings()
    set_global_settings(settings)
    # Reinitialize plugins in case a previous test configured some.
    set_global_plugin_proxy(PluginProxy([]))
    db_engine = get_db_engine(db_url(tmpdir))
    # Clean up from previous tests if using a persistent database.
    if "MEROU_TEST_DATABASE" in os.environ:
        Model.metadata.drop_all(db_engine)
    # Create the database schema and the corresponding session.
    Model.metadata.create_all(db_engine)
    Session.configure(bind=db_engine)
    session = Session()

    def fin():
        # type: () -> None
        """Explicitly close the session to avoid any dangling transactions."""
        session.close()

    request.addfinalizer(fin)
    return session
def main(sys_argv=sys.argv):
    # type: (List[str]) -> None
    """Entry point for the Grouper API server: parse arguments, load
    configuration and plugins, then run the server until it exits."""
    setup_signal_handlers()

    # Command-line arguments.
    args = build_arg_parser("Grouper API Server").parse_args(sys_argv[1:])

    try:
        settings = ApiSettings.global_settings_from_config(args.config)
        setup_logging(args, settings.log_format)
        plugins = PluginProxy.load_plugins(settings, "grouper-api")
        set_global_plugin_proxy(plugins)
    except PluginsDirectoryDoesNotExist as exc:
        logging.fatal("Plugin directory does not exist: %s", exc)
        sys.exit(1)
    except Exception:
        logging.exception("Uncaught exception in startup")
        sys.exit(1)

    try:
        start_server(args, settings, plugins)
    except Exception:
        # Report through the plugins first so external trackers see the error.
        plugins.log_exception(None, None, *sys.exc_info())
        logging.exception("Uncaught exception")
    finally:
        logging.info("end")
def main(sys_argv=sys.argv):
    # type: (List[str]) -> None
    """Entry point for the Grouper frontend web server.

    Parses arguments, loads configuration and plugins, configures the curl
    HTTP client, and runs the server until it exits.  Exits with status 1 on
    any startup failure.
    """
    setup_signal_handlers()

    # The curl HTTP client is required to support proxies.
    AsyncHTTPClient.configure(CurlAsyncHTTPClient)

    # get arguments
    parser = build_arg_parser("Grouper Web Server.")
    args = parser.parse_args(sys_argv[1:])

    try:
        settings = FrontendSettings.global_settings_from_config(args.config)
        setup_logging(args, settings.log_format)
        plugins = PluginProxy.load_plugins(settings, "grouper-fe")
        set_global_plugin_proxy(plugins)
    except PluginsDirectoryDoesNotExist as e:
        # Use lazy %-style logging arguments (instead of str.format) for
        # consistency with the API server entry point and to defer formatting
        # to the logging framework.
        logging.fatal("Plugin directory does not exist: %s", e)
        sys.exit(1)
    except Exception:
        logging.exception("Uncaught exception in startup")
        sys.exit(1)

    try:
        start_server(args, settings, plugins)
    except Exception:
        # Let plugins report the exception (e.g. to an error tracker) first.
        plugins.log_exception(None, None, *sys.exc_info())
        logging.exception("Uncaught exception")
    finally:
        logging.info("end")
def __init__(self, tmpdir):
    # type: (LocalPath) -> None
    """Build a fully wired test environment backed by a database in tmpdir."""
    self.settings = Settings()
    self.settings.database = db_url(tmpdir)

    # Install an empty global plugin proxy so plugins configured by an earlier
    # test cannot leak into this one.  The global can be retired once a plugin
    # proxy is injected into everything that needs it.
    self.plugins = PluginProxy([])
    set_global_plugin_proxy(self.plugins)

    self.initialize_database()
    self.session = SessionFactory(self.settings).create_session()
    self.graph = GroupGraph()

    # Wire the factory chain: repositories -> services -> usecases.
    sessions = SingletonSessionFactory(self.session)
    self.repository_factory = GraphRepositoryFactory(
        self.settings, self.plugins, sessions, self.graph
    )
    self.sql_repository_factory = SQLRepositoryFactory(self.settings, self.plugins, sessions)
    self.service_factory = ServiceFactory(self.settings, self.plugins, self.repository_factory)
    self.usecase_factory = UseCaseFactory(self.settings, self.plugins, self.service_factory)
    self._transaction_service = self.service_factory.create_transaction_service()
def api_app(session, standard_graph):
    # type: (Session, GroupGraph) -> GrouperApplication
    """Fixture building a Grouper API application wired to the test session
    and the standard graph."""
    settings = ApiSettings()
    set_global_settings(settings)
    plugins = PluginProxy([])
    set_global_plugin_proxy(plugins)
    session_factory = SingletonSessionFactory(session)
    usecase_factory = create_graph_usecase_factory(
        settings, plugins, session_factory, standard_graph
    )
    return create_api_application(standard_graph, settings, plugins, usecase_factory)
def test_user_created_plugin(setup: SetupTest):
    """Test calls to the user_created plugin."""
    plugin = UserCreatedPlugin()
    # WARN: Relies on the user_created function being called from the global proxy.
    # Will need to change once everything uses an injected plugin proxy.
    set_global_plugin_proxy(PluginProxy([plugin]))
    # NOTE(review): statement placement relative to the `with` block below was
    # reconstructed from a whitespace-mangled source; all statements are assumed
    # to run inside one transaction -- confirm against upstream history.
    with setup.transaction():
        setup.create_user("*****@*****.**")
        assert plugin.calls == 1
        # The plugin should also fire for service accounts.
        plugin.expected_service_account = True
        setup.create_service_account("*****@*****.**", "owner", "machine set", "desc")
        assert plugin.calls == 2
def __init__(self, tmpdir):
    # type: (LocalPath) -> None
    """Set up test state: settings, an empty plugin proxy, the group graph,
    and a freshly initialized database."""
    self.settings = Settings()
    self.settings.database = db_url(tmpdir)

    # Reset the global plugin proxy to an empty plugin list so state from a
    # previous test cannot leak in.  This global goes away once a plugin proxy
    # is injected into everything that needs it.
    self.plugins = PluginProxy([])
    set_global_plugin_proxy(self.plugins)

    self.graph = GroupGraph()
    self.initialize_database()
    self.open_database()
def start_processor(args, settings):
    # type: (Namespace, BackgroundSettings) -> None
    """Load plugins, bind the database session, and run the background
    processor loop."""
    log_level = logging.getLevelName(logging.getLogger().level)
    logging.info("begin. log_level={}".format(log_level))

    try:
        plugins = PluginProxy.load_plugins(settings, "grouper-background")
        set_global_plugin_proxy(plugins)
    except PluginsDirectoryDoesNotExist as exc:
        logging.fatal("Plugin directory does not exist: {}".format(exc))
        sys.exit(1)

    # Bind the session factory to the configured database engine.
    logging.debug("configure database session")
    Session.configure(bind=get_db_engine(settings.database))

    BackgroundProcessor(settings, plugins).run()
def start_processor(args, settings, sentry_client):
    # type: (Namespace, BackgroundSettings, SentryProxy) -> None
    """Load plugins, bind the database session, and run the background
    processor loop, reporting errors through the given Sentry client."""
    log_level = logging.getLevelName(logging.getLogger().level)
    logging.info("begin. log_level={}".format(log_level))

    try:
        plugins = PluginProxy.load_plugins(settings, "grouper-background")
        set_global_plugin_proxy(plugins)
    except PluginsDirectoryDoesNotExist as exc:
        logging.fatal("Plugin directory does not exist: {}".format(exc))
        sys.exit(1)

    # Bind the session factory to the configured database engine.
    logging.debug("configure database session")
    Session.configure(bind=get_db_engine(settings.database))

    BackgroundProcessor(settings, sentry_client).run()
def __init__(self, tmpdir):
    # type: (LocalPath) -> None
    """Assemble the test object graph backed by a database in tmpdir."""
    self.settings = Settings()
    self.settings.database = db_url(tmpdir)

    # Install an empty global plugin proxy so plugins registered by an earlier
    # test cannot leak into this one.  Removable once the proxy is injected
    # everywhere instead of being maintained as a global.
    self.plugin_proxy = PluginProxy([])
    set_global_plugin_proxy(self.plugin_proxy)

    self.initialize_database()
    self.session = SessionFactory(self.settings).create_session()
    self.graph = GroupGraph()

    # repositories -> services -> usecases
    session_factory = SingletonSessionFactory(self.session)
    self.repository_factory = GraphRepositoryFactory(
        self.settings, self.plugin_proxy, session_factory, self.graph
    )
    self.service_factory = ServiceFactory(self.repository_factory)
    self.usecase_factory = UseCaseFactory(self.settings, self.service_factory)
    self._transaction_service = self.service_factory.create_transaction_service()
def start_server(args, settings, sentry_client):
    # type: (Namespace, FrontendSettings, SentryProxy) -> None
    """Start the frontend HTTP server and run its IOLoop until interrupted.

    Loads plugins, binds the database session, listens either on a socket
    inherited via stdin or on a configured address/port, forks worker
    processes, and starts the graph-refresh thread in each process.
    """
    log_level = logging.getLevelName(logging.getLogger().level)
    logging.info("begin. log_level={}".format(log_level))

    # Per the assertion message: debug mode does not support multiple processes.
    assert not (
        settings.debug and settings.num_processes > 1
    ), "debug mode does not support multiple processes"

    try:
        plugins = PluginProxy.load_plugins(settings, "grouper-fe")
        set_global_plugin_proxy(plugins)
    except PluginsDirectoryDoesNotExist as e:
        logging.fatal("Plugin directory does not exist: {}".format(e))
        sys.exit(1)

    # setup database
    logging.debug("configure database session")
    if args.database_url:
        # The command-line flag overrides the database URL from the config file.
        settings.database = args.database_url
    Session.configure(bind=get_db_engine(settings.database))

    application = create_fe_application(settings, args.deployment_name)
    ssl_context = plugins.get_ssl_context()
    if args.listen_stdin:
        logging.info(
            "Starting application server with %d processes on stdin", settings.num_processes
        )
        server = HTTPServer(application, ssl_options=ssl_context)
        if PY2:
            # Python 2 cannot wrap an existing fd via socket.socket(fileno=...);
            # assumes the inherited fd is an AF_INET stream socket -- TODO confirm.
            s = socket.fromfd(sys.stdin.fileno(), socket.AF_INET, socket.SOCK_STREAM)
            s.setblocking(False)
            s.listen(5)
        else:
            s = socket.socket(fileno=sys.stdin.fileno())
            s.setblocking(False)
            s.listen()
        server.add_sockets([s])
    else:
        # Command-line address/port take precedence over configured values.
        address = args.address or settings.address
        port = args.port or settings.port
        logging.info(
            "Starting application server with %d processes on %s:%d",
            settings.num_processes,
            address,
            port,
        )
        server = HTTPServer(application, ssl_options=ssl_context)
        server.bind(port, address=address)
        # When using multiple processes, the forking happens here
        server.start(settings.num_processes)

    stats.set_defaults()

    # Create the Graph and start the graph update thread post fork to ensure each process gets
    # updated.
    with closing(Session()) as session:
        graph = Graph()
        graph.update_from_db(session)
    refresher = DbRefreshThread(settings, graph, settings.refresh_interval, sentry_client)
    # Daemonize so the refresh thread does not block interpreter shutdown.
    refresher.daemon = True
    refresher.start()

    try:
        IOLoop.current().start()
    except KeyboardInterrupt:
        IOLoop.current().stop()
    finally:
        print("Bye")
def start_server(args, settings, sentry_client):
    # type: (Namespace, FrontendSettings, SentryProxy) -> None
    """Start the frontend HTTP server and run its IOLoop until interrupted.

    Plugin loading, database binding, socket setup (inherited stdin socket or
    configured address/port), process forking, and the post-fork graph-refresh
    thread all happen here, in that order.
    """
    log_level = logging.getLevelName(logging.getLogger().level)
    logging.info("begin. log_level={}".format(log_level))

    # Per the assertion message: debug mode does not support multiple processes.
    assert not (
        settings.debug and settings.num_processes > 1
    ), "debug mode does not support multiple processes"

    try:
        plugins = PluginProxy.load_plugins(settings, "grouper-fe")
        set_global_plugin_proxy(plugins)
    except PluginsDirectoryDoesNotExist as e:
        logging.fatal("Plugin directory does not exist: {}".format(e))
        sys.exit(1)

    # setup database
    logging.debug("configure database session")
    if args.database_url:
        # Command-line database URL overrides the configured one.
        settings.database = args.database_url
    Session.configure(bind=get_db_engine(settings.database))

    application = create_fe_application(settings, args.deployment_name)
    ssl_context = plugins.get_ssl_context()
    if args.listen_stdin:
        logging.info(
            "Starting application server with %d processes on stdin", settings.num_processes
        )
        server = HTTPServer(application, ssl_options=ssl_context)
        if PY2:
            # Python 2 path: wrap the inherited fd; assumes an AF_INET stream
            # socket on stdin -- TODO confirm.
            s = socket.fromfd(sys.stdin.fileno(), socket.AF_INET, socket.SOCK_STREAM)
            s.setblocking(False)
            s.listen(5)
        else:
            s = socket.socket(fileno=sys.stdin.fileno())
            s.setblocking(False)
            s.listen()
        server.add_sockets([s])
    else:
        # Command-line address/port take precedence over configured values.
        address = args.address or settings.address
        port = args.port or settings.port
        logging.info(
            "Starting application server with %d processes on %s:%d",
            settings.num_processes,
            address,
            port,
        )
        server = HTTPServer(application, ssl_options=ssl_context)
        server.bind(port, address=address)
        # When using multiple processes, the forking happens here
        server.start(settings.num_processes)

    stats.set_defaults()

    # Create the Graph and start the graph update thread post fork to ensure each process gets
    # updated.
    with closing(Session()) as session:
        graph = Graph()
        graph.update_from_db(session)
    refresher = DbRefreshThread(settings, graph, settings.refresh_interval, sentry_client)
    # Daemon thread: must not keep the process alive on shutdown.
    refresher.daemon = True
    refresher.start()

    try:
        IOLoop.current().start()
    except KeyboardInterrupt:
        IOLoop.current().stop()
    finally:
        print("Bye")
def main(sys_argv=sys.argv, session=None):
    # type: (List[str], Optional[Session]) -> None
    """Entry point for the grouper-ctl command-line tool.

    An explicit session may be passed in (used by tests); otherwise one is
    created from the configured database.
    """
    parser = argparse.ArgumentParser(description="Grouper Control")
    parser.add_argument(
        "-c", "--config", default=default_settings_path(), help="Path to config file."
    )
    parser.add_argument(
        "-d", "--database-url", type=str, default=None, help="Override database URL in config."
    )
    parser.add_argument(
        "-q", "--quiet", action="count", default=0, help="Decrease logging verbosity."
    )
    parser.add_argument(
        "-v", "--verbose", action="count", default=0, help="Increase logging verbosity."
    )
    parser.add_argument(
        "-V",
        "--version",
        action="version",
        version="%%(prog)s %s" % __version__,
        help="Display version information.",
    )

    subparsers = parser.add_subparsers(dest="command")
    CtlCommandFactory.add_all_parsers(subparsers)
    # Legacy subcommands that have not yet been refactored register their own
    # parsers here.
    for legacy_module in [group, oneoff, service_account, shell]:
        legacy_module.add_parser(subparsers)  # type: ignore
    args = parser.parse_args(sys_argv[1:])

    # CtlSettings doubles as the global Settings object: grouper.ctl.* code
    # takes it as an argument, but legacy code it calls still reads the global.
    settings = CtlSettings.global_settings_from_config(args.config)
    if args.database_url:
        settings.database = args.database_url

    # Session factory handed to legacy commands that predate usecases.
    if session:
        session_factory = SingletonSessionFactory(session)  # type: SessionFactory
    else:
        session_factory = SessionFactory(settings)

    log_level = get_loglevel(args, base=logging.INFO)
    logging.basicConfig(level=log_level, format=settings.log_format)
    if log_level < 0:
        sa_log.setLevel(logging.INFO)

    # Initialize plugins.  The global plugin proxy is used by legacy code.
    try:
        plugins = PluginProxy.load_plugins(settings, "grouper-ctl")
    except PluginsDirectoryDoesNotExist as exc:
        logging.fatal("Plugin directory does not exist: {}".format(exc))
        sys.exit(1)
    set_global_plugin_proxy(plugins)

    usecase_factory = create_sql_usecase_factory(settings, plugins, session_factory)
    command_factory = CtlCommandFactory(settings, usecase_factory)

    # Old-style subcommands stash a callable in args.func; new-style ones are
    # dispatched through the command factory.
    if getattr(args, "func", None):
        args.func(args, settings, session_factory)
    else:
        command_factory.construct_command(args.command).run(args)
def main(sys_argv=sys.argv, session=None):
    # type: (List[str], Optional[Session]) -> None
    """Run the grouper-ctl command-line interface.

    Tests may inject a session; otherwise a session factory is built from the
    configured (or command-line-overridden) database URL.
    """
    description_msg = "Grouper Control"
    parser = argparse.ArgumentParser(description=description_msg)
    parser.add_argument(
        "-c", "--config", default=default_settings_path(), help="Path to config file."
    )
    parser.add_argument(
        "-d", "--database-url", type=str, default=None, help="Override database URL in config."
    )
    parser.add_argument(
        "-q", "--quiet", action="count", default=0, help="Decrease logging verbosity."
    )
    parser.add_argument(
        "-v", "--verbose", action="count", default=0, help="Increase logging verbosity."
    )
    parser.add_argument(
        "-V",
        "--version",
        action="version",
        version="%%(prog)s %s" % __version__,
        help="Display version information.",
    )

    subparsers = parser.add_subparsers(dest="command")
    CtlCommandFactory.add_all_parsers(subparsers)
    # Parsers for legacy commands that have not been refactored yet.
    for subcommand_module in [group, oneoff, service_account, shell]:
        subcommand_module.add_parser(subparsers)  # type: ignore
    args = parser.parse_args(sys_argv[1:])

    # The CtlSettings object is also installed as the global Settings object,
    # since legacy code reached from here still requires the global.
    settings = CtlSettings.global_settings_from_config(args.config)
    if args.database_url:
        settings.database = args.database_url

    # Session factory for legacy commands not yet converted to usecases.
    if session:
        session_factory = SingletonSessionFactory(session)  # type: SessionFactory
    else:
        session_factory = SessionFactory(settings)

    log_level = get_loglevel(args, base=logging.INFO)
    logging.basicConfig(level=log_level, format=settings.log_format)
    if log_level < 0:
        sa_log.setLevel(logging.INFO)

    # Initialize plugins; the global plugin proxy serves legacy code.
    try:
        plugins = PluginProxy.load_plugins(settings, "grouper-ctl")
    except PluginsDirectoryDoesNotExist as exc:
        logging.fatal("Plugin directory does not exist: {}".format(exc))
        sys.exit(1)
    set_global_plugin_proxy(plugins)

    # Set up factories.
    usecase_factory = create_sql_usecase_factory(settings, plugins, session_factory)
    command_factory = CtlCommandFactory(settings, usecase_factory)

    # Old-style subcommands store a func when setting up their arguments;
    # new-style subcommands are constructed and run via the factory.
    if getattr(args, "func", None):
        args.func(args, settings, session_factory)
    else:
        command = command_factory.construct_command(args.command)
        command.run(args)