def start_server(args, sentry_client):
    # type: (Namespace, SentryProxy) -> None
    """Run the Grouper frontend Tornado server until interrupted.

    Initializes plugins and the database session, builds the frontend
    application, forks worker processes, then starts the per-process refresh
    machinery and the IOLoop.  Exits the process if the plugin directory is
    missing.
    """
    log_level = logging.getLevelName(logging.getLogger().level)
    logging.info("begin. log_level={}".format(log_level))

    # NOTE(review): assert is stripped under `python -O`; an explicit check
    # would enforce this invariant unconditionally — confirm intent.
    assert not (
        settings.debug and settings.num_processes > 1
    ), "debug mode does not support multiple processes"

    try:
        initialize_plugins(settings.plugin_dirs, settings.plugin_module_paths, "grouper_fe")
    except PluginsDirectoryDoesNotExist as e:
        logging.fatal("Plugin directory does not exist: {}".format(e))
        sys.exit(1)

    # setup database
    logging.debug("configure database session")
    # Command-line --database-url overrides the configured URL.
    database_url = args.database_url or get_database_url(settings)
    Session.configure(bind=get_db_engine(database_url))

    usecase_factory = create_graph_usecase_factory(settings, Session())
    application = create_fe_application(settings, usecase_factory, args.deployment_name)

    # Command-line flags take precedence over settings for address/port.
    address = args.address or settings.address
    port = args.port or settings.port
    ssl_context = get_plugin_proxy().get_ssl_context()

    logging.info(
        "Starting application server with %d processes on port %d",
        settings.num_processes,
        port,
    )
    server = tornado.httpserver.HTTPServer(application, ssl_options=ssl_context)
    server.bind(port, address=address)
    # When using multiple processes, the forking happens here
    server.start(settings.num_processes)

    stats.set_defaults()

    # Create the Graph and start the config / graph update threads post fork to ensure each
    # process gets updated.
    settings.start_config_thread(args.config, "fe")
    with closing(Session()) as session:
        graph = Graph()
        graph.update_from_db(session)

    refresher = DbRefreshThread(settings, graph, settings.refresh_interval, sentry_client)
    # Daemon thread: it must not keep the process alive after the IOLoop stops.
    refresher.daemon = True
    refresher.start()

    try:
        tornado.ioloop.IOLoop.instance().start()
    except KeyboardInterrupt:
        tornado.ioloop.IOLoop.instance().stop()
    finally:
        print("Bye")
def fe_app(session, standard_graph, tmpdir):
    # type: (Session, GroupGraph, LocalPath) -> GrouperApplication
    """Build a frontend GrouperApplication wired to the given session and graph."""
    fe_settings = Settings({"debug": False})
    factory = create_graph_usecase_factory(fe_settings, session, standard_graph)
    # XSRF cookies are disabled and the session is pinned to the one provided.
    return create_fe_application(
        fe_settings,
        factory,
        "",
        xsrf_cookies=False,
        session=lambda: session,
    )
def api_app(session, standard_graph):
    # type: (Session, GroupGraph) -> GrouperApplication
    """Build an API GrouperApplication using fresh ApiSettings and an empty plugin proxy."""
    api_settings = ApiSettings()
    set_global_settings(api_settings)
    factory = create_graph_usecase_factory(
        api_settings,
        PluginProxy([]),  # no plugins participate here
        SingletonSessionFactory(session),
        standard_graph,
    )
    return create_api_application(standard_graph, api_settings, factory)
def start_server(args, settings, plugins):
    # type: (Namespace, ApiSettings, PluginProxy) -> None
    """Run the Grouper API Tornado server until interrupted.

    Configures the database, loads the graph, starts the refresh thread, and
    serves either on an inherited socket passed as stdin (--listen-stdin) or
    on a bound address/port, forking worker processes as configured.
    """
    log_level = logging.getLevelName(logging.getLogger().level)
    logging.info("begin. log_level={}".format(log_level))

    # NOTE(review): assert is stripped under `python -O`; an explicit check
    # would enforce this invariant unconditionally — confirm intent.
    assert not (
        settings.debug and settings.num_processes > 1
    ), "debug mode does not support multiple processes"

    # setup database
    logging.debug("configure database session")
    # Command-line --database-url overrides the configured URL.
    if args.database_url:
        settings.database = args.database_url
    Session.configure(bind=get_db_engine(settings.database))

    # Populate the in-memory graph once before serving requests.
    with closing(Session()) as session:
        graph = Graph()
        graph.update_from_db(session)

    refresher = DbRefreshThread(settings, plugins, graph, settings.refresh_interval)
    # Daemon thread: it must not keep the process alive after the IOLoop stops.
    refresher.daemon = True
    refresher.start()

    usecase_factory = create_graph_usecase_factory(settings, plugins, graph=graph)
    application = create_api_application(graph, settings, plugins, usecase_factory)

    if args.listen_stdin:
        # Adopt an already-listening socket inherited as stdin (presumably
        # inetd/systemd-style socket activation — verify against the caller).
        logging.info("Starting application server on stdin")
        server = HTTPServer(application)
        if PY2:
            # Python 2 has no socket(fileno=...); rebuild from the raw fd.
            s = socket.fromfd(sys.stdin.fileno(), socket.AF_INET, socket.SOCK_STREAM)
            s.setblocking(False)
            s.listen(5)
        else:
            s = socket.socket(fileno=sys.stdin.fileno())
            s.setblocking(False)
            s.listen()
        server.add_sockets([s])
    else:
        # Command-line flags take precedence over settings for address/port.
        address = args.address or settings.address
        port = args.port or settings.port
        logging.info("Starting application server on %s:%d", address, port)
        server = HTTPServer(application)
        server.bind(port, address=address)

    # When using multiple processes, the forking happens here.
    server.start(settings.num_processes)

    stats.set_defaults()

    try:
        IOLoop.current().start()
    except KeyboardInterrupt:
        IOLoop.current().stop()
    finally:
        print("Bye")
def initialize(self, *args: Any, **kwargs: Any) -> None:
    """Prepare per-request state: graph, DB session, templates, plugins,
    use-case factory, optional profiling, and the request start timestamp."""
    self.graph = Graph()
    self.session = self.settings["session"]()  # type: Session
    self.template_engine = self.settings["template_engine"]  # type: FrontendTemplateEngine
    self.plugins = get_plugin_proxy()
    self.usecase_factory = create_graph_usecase_factory(
        settings(), self.plugins, SingletonSessionFactory(self.session)
    )
    # Performance tracing is opt-in per request via the _profile argument.
    if not self.get_argument("_profile", False):
        self.perf_collector = None
        self.perf_trace_uuid = None
    else:
        self.perf_collector = Collector()
        self.perf_trace_uuid = str(uuid4())  # type: Optional[str]
        self.perf_collector.start()
    self._request_start_time = datetime.utcnow()
def initialize(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Prepare per-request state (graph, DB session, templates, plugins,
    use-case factory, optional profiling) and record request-rate stats."""
    self.graph = Graph()
    self.session = self.settings["session"]()  # type: Session
    self.template_engine = self.settings["template_engine"]  # type: FrontendTemplateEngine
    self.plugins = get_plugin_proxy()
    self.usecase_factory = create_graph_usecase_factory(
        settings(), self.plugins, SingletonSessionFactory(self.session)
    )
    # Performance tracing is opt-in per request via the _profile argument.
    if not self.get_argument("_profile", False):
        self.perf_collector = None
        self.perf_trace_uuid = None
    else:
        self.perf_collector = Collector()
        self.perf_trace_uuid = str(uuid4())  # type: Optional[str]
        self.perf_collector.start()
    self._request_start_time = datetime.utcnow()
    # Per-handler and overall request counters.
    stats.log_rate("requests", 1)
    stats.log_rate("requests_{}".format(self.__class__.__name__), 1)
def initialize(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Prepare per-request state (graph, DB session, templates, plugins,
    use-case factory, optional profiling) and record request-rate stats.

    Fix: the trailing "initialized" message was emitted via logging.error even
    though it marks routine, successful initialization; it is now logged at
    DEBUG so it no longer pollutes error-level logs and alerting.
    """
    self.graph = Graph()
    self.session = self.settings["session"]()  # type: Session
    self.template_engine = self.settings["template_engine"]  # type: FrontendTemplateEngine
    self.plugins = get_plugin_proxy()
    session_factory = SingletonSessionFactory(self.session)
    self.usecase_factory = create_graph_usecase_factory(
        settings(), self.plugins, session_factory
    )
    # Performance tracing is opt-in per request via the _profile argument.
    if self.get_argument("_profile", False):
        self.perf_collector = Collector()
        self.perf_trace_uuid = str(uuid4())  # type: Optional[str]
        self.perf_collector.start()
    else:
        self.perf_collector = None
        self.perf_trace_uuid = None
    self._request_start_time = datetime.utcnow()
    # Per-handler and overall request counters.
    stats.log_rate("requests", 1)
    stats.log_rate("requests_{}".format(self.__class__.__name__), 1)
    # Routine lifecycle message; was logging.error, now DEBUG severity.
    logging.debug("initialized")
def start_server(args, settings, sentry_client):
    # type: (Namespace, ApiSettings, SentryProxy) -> None
    """Run the Grouper API Tornado server until interrupted.

    Loads plugins, configures the database, populates the graph, starts the
    refresh thread, and serves either on an inherited socket passed as stdin
    (--listen-stdin) or on a bound address/port, forking worker processes as
    configured.  Exits the process if the plugin directory is missing.
    """
    log_level = logging.getLevelName(logging.getLogger().level)
    logging.info("begin. log_level={}".format(log_level))

    # NOTE(review): assert is stripped under `python -O`; an explicit check
    # would enforce this invariant unconditionally — confirm intent.
    assert not (
        settings.debug and settings.num_processes > 1
    ), "debug mode does not support multiple processes"

    try:
        plugins = PluginProxy.load_plugins(settings, "grouper-api")
        set_global_plugin_proxy(plugins)
    except PluginsDirectoryDoesNotExist as e:
        logging.fatal("Plugin directory does not exist: {}".format(e))
        sys.exit(1)

    # setup database
    logging.debug("configure database session")
    # Command-line --database-url overrides the configured URL.
    if args.database_url:
        settings.database = args.database_url
    Session.configure(bind=get_db_engine(settings.database))

    # Populate the in-memory graph once before serving requests.
    with closing(Session()) as session:
        graph = Graph()
        graph.update_from_db(session)

    refresher = DbRefreshThread(settings, graph, settings.refresh_interval, sentry_client)
    # Daemon thread: it must not keep the process alive after the IOLoop stops.
    refresher.daemon = True
    refresher.start()

    usecase_factory = create_graph_usecase_factory(settings, plugins, graph=graph)
    application = create_api_application(graph, settings, usecase_factory)

    if args.listen_stdin:
        # Adopt an already-listening socket inherited as stdin (presumably
        # inetd/systemd-style socket activation — verify against the caller).
        logging.info("Starting application server on stdin")
        server = HTTPServer(application)
        if PY2:
            # Python 2 has no socket(fileno=...); rebuild from the raw fd.
            s = socket.fromfd(sys.stdin.fileno(), socket.AF_INET, socket.SOCK_STREAM)
            s.setblocking(False)
            s.listen(5)
        else:
            s = socket.socket(fileno=sys.stdin.fileno())
            s.setblocking(False)
            s.listen()
        server.add_sockets([s])
    else:
        # Command-line flags take precedence over settings for address/port.
        address = args.address or settings.address
        port = args.port or settings.port
        logging.info("Starting application server on %s:%d", address, port)
        server = HTTPServer(application)
        server.bind(port, address=address)

    # When using multiple processes, the forking happens here.
    server.start(settings.num_processes)

    stats.set_defaults()

    try:
        IOLoop.current().start()
    except KeyboardInterrupt:
        IOLoop.current().stop()
    finally:
        print("Bye")
def api_app(session, standard_graph):
    # type: (Session, GroupGraph) -> GrouperApplication
    """Build an API GrouperApplication with debug disabled, bound to the
    supplied session and graph."""
    app_settings = Settings({"debug": False})
    factory = create_graph_usecase_factory(app_settings, session, standard_graph)
    return create_api_application(standard_graph, app_settings, factory)