def log_config():
    """Logs the config used to start the application.

    Builds one ``key="value"`` line per option, sorted by option name,
    and emits the whole set as a single INFO record.
    """
    # .items() instead of the Python-2-only .iteritems(), which was
    # removed in Python 3; sorting keeps the output deterministic.
    conf = '\n'.join([
        '{}="{}"'.format(k, v) for k, v in sorted(options.as_dict().items())
    ])
    logging.info('Service started with the following settings:\n' + conf)
def run_app(): options.parse_command_line() # 1. Create sync router sync_router = sockjs.tornado.SockJSRouter(SyncConnection, '/sync') # 2. Create Tornado application app = Application( [ (r'/', IndexHandler), (r'/(?P<pad_id>[0-9a-z\-]+)', PadHandler), (r'/static/(.*)', tornado.web.StaticFileHandler, {'path': STATIC_ROOT}) ] + sync_router.urls, cookie_secret=options.cookie_secret, static_url_prefix='/static/', template_path=TEMPLATES_ROOT, pad_lifetime=options.pad_lifetime, ) # 3. Make Tornado app listen on port 8080 app.listen(options.port) logging.info("App started, visit http://localhost:%s" % options.port) logging.info("App started with options: %r" % options.as_dict()) # 4. Start IOLoop tornado.ioloop.IOLoop.instance().start()
def __init__(self):
    """Initialise heartbeat state from the application options."""
    settings = options.as_dict()
    self._count = 0  # number of heartbeats fired so far
    self._interval = 1  # interval (seconds) between heartbeat ticks
    self._print_interval = settings.get('HEARTBEAT_INTERVAL', 0)  # print interval (seconds); 0 disables printing
    self._broadcast_interval = settings.get('HEARTBEAT_BROADCAST', 0)  # broadcast interval (seconds); 0 disables broadcasting
    self._tasks = {}  # callbacks run on each heartbeat, registered via self.register: {task_id: {...}}
def load_config(path=None):
    '''
    This extends the tornado parser to enable use in heroku where options
    are accessed through os.getenv

    Will read file at path if exists
    Will then read environment variables to override
    Will then parse command line to override
    '''
    if path is not None and os.path.isfile(path):
        logging.info("loading config from %s", path)
        parse_config_file(path)
    for k in options.as_dict():
        ''' danger: access of private variables '''
        # NOTE(review): _normalize_name/_options are private tornado APIs
        # and may break across tornado versions — confirm on upgrade.
        value = os.getenv(k)
        # an empty-string environment variable is treated as unset here
        if value:
            name = options._normalize_name(k)
            option = options._options.get(name)
            option.parse(value)
    parse_command_line()
def pretty_settings_logging():
    '''
    Output a pretty format server settings.

    Returns one "key : value" line per option, joined with newlines.
    '''
    # .items() instead of the Python-2-only .iteritems() (removed in
    # Python 3); str() guards against non-string option values.
    return '\n'.join(
        [key + ' : ' + str(value) for key, value in options.as_dict().items()]
    )
def options_parse_environment():
    """Override options from environment variables.

    Used for defining vars in cloud environments; takes priority over
    rootthebox.cfg variables. Image-type options are materialised via
    save_config_image; other values are coerced to the existing option's
    type and rejected (with an error log) on type mismatch. Setting DEMO
    additionally loads the demo XML and creates a demo user.
    """
    if os.environ.get("PORT", None) is not None:
        # Heroku uses $PORT to define listen_port
        options.listen_port = int(os.environ.get("PORT"))
        logging.info("Environment Configuration (PORT): %d" % options.listen_port)
    images = ["ctf_logo", "story_character", "scoreboard_right_image"]
    for item in options.as_dict():
        # the UPPERCASE env var wins over the exact option name
        config = os.environ.get(item.upper(), os.environ.get(item, None))
        if config is not None:
            if item in images:
                value = save_config_image(config)
            else:
                value = config
            value = set_type(value, options[item])
            if isinstance(value, type(options[item])):
                logging.info("Environment Configuration (%s): %s" % (item.upper(), value))
                options[item] = value
            else:
                # fixed typo in log message: "Confirguation" -> "Configuration"
                logging.error(
                    "Environment Configuration (%s): unable to convert type %s to %s for %s"
                    % (item.upper(), type(value), type(options[item]), value))
    if os.environ.get("DEMO"):
        setup_xml(["setup/demo_juiceshop.xml"])
        from libs.ConfigHelpers import create_demo_user
        logging.info("Setting Up Demo Environment...")
        create_demo_user()
        options.autostart_game = True
def pretty_options_output():
    '''
    Output options in a pretty format.

    Logs a "[SERVER SETTINGS]" header, one "key-----value" line per
    option, then a 54-character '#' separator line.
    '''
    logging.info('[SERVER SETTINGS]')
    # Explicit loop instead of map(): under Python 3 map() is lazy, so the
    # original logging side effects would never execute. .items() replaces
    # the Python-2-only .iteritems(); xrange no longer exists.
    for key, value in options.as_dict().items():
        logging.info('{0}-----{1}'.format(key, value))
    # '#' * 54 is exactly '#'.join of 55 empty strings
    logging.info('#' * 54)
def main():
    """Build the Tornado app, bind the HTTP server and run the IOLoop."""
    # All defined options are forwarded as Application settings
    application = tornado.web.Application([
        (r"/", MainHandler),
    ], **options.as_dict())
    # xheaders=True honours X-Real-Ip/X-Forwarded-For from a fronting proxy
    http_server = tornado.httpserver.HTTPServer(application, xheaders=True)
    http_server.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()
def write_worker(self):
    """Insert one random TestTable row using its own engine/session."""
    logger.debug('write start {}'.format(self.id_ if self.id_ else ''))
    # Fresh engine per call; all options are forwarded as connection args
    engine = models.get_engine(**options.as_dict())
    # closing() guarantees the session is released even if commit raises
    with contextlib.closing(models.get_session(engine)) as session:
        t = models.TestTable.random(session)
        session.add(t)
        session.commit()
    logger.debug('write stop {}'.format(self.id_ if self.id_ else ''))
def criar_rotas():
    """Create the Tornado app's routes (sync and async waiter handlers)."""
    routes = [
        URLSpec(r'/garcom-sincrono', GarcomSincrono),
        URLSpec(r'/garcom-assincrono', GarcomAssincrono)
    ]
    # All defined options are forwarded as Application settings
    return Application(routes, **options.as_dict())
def __init__(self):
    """Build the web Application from non-None options and attach routers."""
    super(WebApplication, self).__init__()
    self.http_server = None
    # None-valued options are dropped so tornado's defaults apply
    self.web = Application(
        **{k: v for k, v in options.as_dict().items() if v is not None})
    self.web.http_router = HttpRouter()
    self.web.socket_router = self.socket_router
    # NOTE(review): "socet_session_listener" looks like a typo of
    # "socket_session_listener" — renaming would change a public
    # attribute, so confirm against readers before fixing.
    self.web.socet_session_listener = self.socket_session_listener
def __init__(self, delegate, config=None, *args, **kwargs):
    """Proxy server constructor.

    :param delegate: channel class handling accepted connections
    :param config: option mapping; defaults to a fresh copy of the
        global tornado options.

    Fixed: the original signature used the mutable default
    ``config=options.as_dict()``, evaluated once at import time and then
    mutated below ('auto-time'), so state leaked across instances and
    options changed after import were never seen.
    """
    super(SSSocksProxy, self).__init__(*args, **kwargs)
    self.delegate = delegate
    # Fresh copy per instance when no explicit config is supplied
    self.config = dict(options.as_dict()) if config is None else config
    self.config['auto-time'] = 3600  # seconds
    if self.config['autoshadow']:
        self.auto_shadow_init(kwargs.get('io_loop'))
    else:
        self._shadow_load()
def create_app():
    """ Create instance of tornado.web.Application. """
    # Route table inlined; all defined options become Application settings.
    return Application(
        [
            URLSpec(r'/async', MainHandlerAsync),
            URLSpec(r"/block", MainHandlerBlocking),
        ],
        **options.as_dict()
    )
def get(self):
    """Read server parameter

    :status 200: ok
    """
    # Start from a snapshot of all options, then add runtime facts
    data = options.as_dict()
    data.update({'rootpath': os.path.dirname(__file__),
                 # peak resident set size of this process (platform units)
                 'memory': resource.getrusage(resource.RUSAGE_SELF).ru_maxrss})
    self.write(data)
def from_config(cls):
    """Assemble redis connection keyword arguments from the options.

    REDIS_URL is required; db/password/pool sizes have defaults.
    """
    cfg = options.as_dict().copy()
    return {
        'url': cfg.pop('REDIS_URL'),
        'db': cfg.pop('REDIS_DB', None),
        'password': cfg.pop('REDIS_PWD', None),
        'minsize': cfg.pop('REDIS_MIN', 5),
        'maxsize': cfg.pop('REDIS_MAX', 10),
    }
def main(argv):
    '''Main method

    Defines all server/MQ/TLS options, parses config file and command
    line, validates certificate files and network parameters, then
    starts an HTTPS Tornado server. Python 2 only (print statements).
    '''
    define("host", default='localhost', help="tornado host", type=str)
    define("port", default=1027, help="tornado port", type=int)
    define("server_cert", default="../testCerts/server.crt", help="server certificate", type=str)
    define("server_key", default="../testCerts/server.key", help="server key", type=str)
    define("ca_cert", default="../testCerts/CAcert.pem", help="CA certificate", type=str)
    define("mq_host", default="127.0.0.1", help="MQ server host", type=str)
    define("mq_port", default=61613, help="MQ server port", type=int)
    define("mq_queue", default="/queue/test", help="MQ queue name", type=str)
    define("mq_user", default='guest', help="MQ username", type=str)
    define("mq_password", default='guest', help="MQ password", type=str)
    define("dn_filename", default='Test_DN.json', help="path to file with valid DNs", type=str)
    # final=False so the later parse_command_line() completes parsing
    define("config", type=str, help="path to config file",
           callback=lambda path: options.parse_config_file(path, final=False))
    options.parse_command_line()
    files_to_check = [
        options.server_cert, options.server_key, options.ca_cert,
        options.dn_filename
    ]
    network = {
        'Tornado': (options.host, options.port),
        'MQ': (options.mq_host, options.mq_port)
    }
    are_params_valid(files_to_check, options.server_cert, options.ca_cert, network)
    print "options loaded:%s" % str(options.as_dict())
    print "STARTING TORNADO SERVER! Host:%s, Port:%i" % (options.host, options.port)
    app = make_app()
    # TLS context built from the server cert/key plus the CA bundle
    ssl_ctx = generate_ssl_context(options.server_cert, options.server_key, options.ca_cert)
    http_server = tornado.httpserver.HTTPServer(app, ssl_options=ssl_ctx)
    http_server.listen(options.port)
    tornado.ioloop.IOLoop.current().start()
def read_worker(self):
    """Run one read-heavy query against TestTable (load-test worker)."""
    logger.debug('read start {}'.format(self.id_ if self.id_ else ''))
    engine = models.get_engine(**options.as_dict())
    with contextlib.closing(models.get_session(engine)) as session:
        query = session.query(models.TestTable)\
            .order_by(models.TestTable.value)\
            .filter(models.TestTable.value > models.TestTable._rand_lim)
        for inst in query:
            # deliberate no-op: iterating forces all rows to be fetched
            inst
    logger.debug('read stop {}'.format(self.id_ if self.id_ else ''))
def run():
    """Parse options, install signal handlers and start the Reactor."""
    options.parse_command_line()
    r = Reactor(**options.as_dict())
    signal.signal(signal.SIGTERM, r.stop)
    signal.signal(signal.SIGINT, r.stop)
    # SIGHUP does not exist on Windows; guard like the sibling runners do
    if hasattr(signal, 'SIGHUP'):
        signal.signal(signal.SIGHUP, r.reinit)
    r.start()
def from_config(cls):
    """Assemble database connection kwargs from the global options.

    DB_HOST, DB_USER, DB_PWD and DB_NAME are required; port and pool
    size fall back to sensible defaults.
    """
    data = options.as_dict().copy()
    return dict(
        host=data.pop('DB_HOST'),
        port=data.pop('DB_PORT', 3306),
        user=data.pop('DB_USER'),
        password=data.pop('DB_PWD'),
        max_connections=data.pop('DB_MAX', 10),
        # fixed copy-paste bug: the original defaulted the database NAME
        # to the integer 10 (copied from DB_MAX); a name is required.
        database=data.pop('DB_NAME'),
    )
def get(self):
    """Render a short HTML service description including current options."""
    description = """
<p>Copyright 2015 Holmes Processing
<p>Description: Gathers ASN information for an IP address
<p>Configuration: {}
""".format(options.as_dict())
    self.write(description)
def init():
    """Create the schema and optionally truncate / pre-populate TestTable."""
    engine = models.get_engine(**options.as_dict())
    models.Base.metadata.create_all(engine)
    if options.truncate:
        with contextlib.closing(models.get_session(engine)) as session:
            models.TestTable.truncate(session)
    if options.init_records:
        with contextlib.closing(models.get_session(engine)) as session:
            # bulk insert of N random rows (Python 2: xrange)
            session.bulk_save_objects([models.TestTable.random(session)
                                       for i in xrange(options.init_records)])
            session.commit()
    logger.debug('initiated')
def run():
    """Entry point: wire OS signals to the reactor and start it."""
    options.parse_command_line()
    r = Reactor(**options.as_dict())
    # Both termination signals trigger a clean stop
    for sig in (signal.SIGTERM, signal.SIGINT):
        signal.signal(sig, r.stop)
    # SIGHUP is unavailable on Windows, hence the guard
    if hasattr(signal, 'SIGHUP'):
        signal.signal(signal.SIGHUP, r.reinit)
    r.start()
def run():
    """Configure logging, parse options, wire signals and start the Reactor."""
    boomtrain.logging.setup()
    options.parse_command_line()
    r = Reactor(**options.as_dict())
    signal.signal(signal.SIGTERM, r.stop)
    signal.signal(signal.SIGINT, r.stop)
    # SIGHUP does not exist on Windows; guard like the sibling runners do
    if hasattr(signal, 'SIGHUP'):
        signal.signal(signal.SIGHUP, r.reinit)
    r.start()
def start(self, app):
    """Instantiate the application, bind the HTTP server and run the loop.

    :param app: application factory called with all options as settings
    """
    application = app(**options.as_dict())
    server = HTTPServer(application)
    server.listen(options.port, options.host)
    # Graceful shutdown on Ctrl-C / SIGTERM
    shutdown_handler = lambda sig, frame: shutdown(server)
    signal.signal(signal.SIGINT, shutdown_handler)
    signal.signal(signal.SIGTERM, shutdown_handler)
    logging.info(u"Starting push server on {0}:{1}.".format(
        options.host, options.port
    ))
    IOLoop.instance().start()
def __init__(self):
    """Read MQ connection settings and schedule a connection health check."""
    settings = options.as_dict()
    self._host = settings.get('MQ_HOST', None)
    self._port = settings.get('MQ_PORT', None)
    self._username = settings.get('MQ_USER', None)
    self._pwd = settings.get('MQ_PWD', None)
    self._connected = False  # If connect success.
    self._protocol = None
    self._channel = None  # Connection channel.
    # Register a loop run task to check TCP connection's healthy.
    heartbeat.register(self._check_connection, 60)
async def connection(cls):
    """Create (and cache on the class) an aioredis connection pool."""
    option_data = options.as_dict()
    db_dict = dict(
        db=option_data.get('REDIS_DB', None),
        password=option_data.get('REDIS_PWD', None),
        minsize=option_data.get('REDIS_MIN', 5),
        maxsize=option_data.get('REDIS_MAX', 10),
    )
    # REDIS_URL is required; responses are decoded as utf8 strings
    conn = await aioredis.create_redis_pool(option_data.get('REDIS_URL'),
                                            encoding='utf8', **db_dict)
    cls.conn = conn
    return conn
def get_options(opts=None, group=None):
    """Register option groups, load extra opts, then parse config + argv.

    NOTE(review): the local name `options` shadows any module-level
    tornado `options` object within this function — confirm intended.
    Python 2 only (print statement).
    """
    if opts:
        register_opts(opts, group)
    options = register_opts(common_opts, 'common')
    if options.as_dict().get('extra_opts', ''):
        try:
            # extra_opts names a module whose config.opts get registered too
            extra_opts = __import__(options.extra_opts)
            options = register_opts(extra_opts.config.opts, 'extra')
        except Exception as e:
            print "get config error msg %r" % e
    parse_config_file(options.config, final=False)
    parse_command_line()
    return options
def get(self):
    """Read server parameter

    :status 200: ok
    """
    # Snapshot of all options plus runtime facts about this process
    data = options.as_dict()
    data.update({
        'rootpath': os.path.dirname(__file__),
        # peak resident set size of this process (platform-dependent units)
        'memory': resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    })
    self.write(data)
def get_options(opts=None, group=None): if opts: register_opts(opts, group) options = register_opts(common_opts, 'common') if options.as_dict().get('extra_opts', ''): try: extra_opts = __import__(options.extra_opts) options = register_opts(extra_opts.config.opts, 'extra') except: print "get config error" parse_config_file(options.config, final=False) parse_command_line() return options
def create_app():
    """ Create Lex instance of tornado.web.Application. """
    routes = [
        URLSpec(r'/?', RootHandler),
        URLSpec(r'/healthcheck/?', HealthcheckHandler),
        URLSpec(r'/version/?', VersionHandler),
        URLSpec(r'/recommendation/?', NewsHandler),
        # Static docs site and assets; paths come from the options
        URLSpec(r'/docs/?(.*)', StaticFileHandler,
                {'path': options["template_path"], 'default_filename': 'index.html'}),
        URLSpec(r'/_static/?(.*)', StaticFileHandler,
                {'path': options["static_path"], 'default_filename': 'favicon.ico'})
    ]
    return Application(routes, **options.as_dict())
def main(): """ Main entry point for my service. :return: """ # pylint: disable=global-statement global APISERVER config.define_options() # Attempt to load config from config file try: parse_config_file("server.conf") except IOError: errmsg = ( "{} doesn't exist or couldn't be opened. Using defaults.".format( options.conf_file_path)) logging.warn(errmsg) logging.info(options.as_dict()) platform = Platform.factory(CLOUDERA) endpoints = platform.discover(options) if not endpoints: logging.error("Failed to discover API endpoints of cluster") db_store = HDBDataStore(endpoints['HDFS'].geturl(), endpoints['HBASE'].geturl(), options.thrift_port, options.datasets_table, options.data_repo) routes = get_routes(dataservice) logging.info("Service Routes %s", routes) settings = dict() APISERVER = tornado.httpserver.HTTPServer( Application(routes=routes, settings=settings, db_conn=db_store)) for port in options.ports: try: logging.debug( "Attempting to bind for dataset dataset on port:%d and address %s", port, options.bind_address) APISERVER.listen(port, options.bind_address) logging.info("Awesomeness is listening on:%s", port) break except socket.error: logging.warn("Not able to bind on port:%d", port) else: logging.warn("No free port available to bind dataset") signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGINT, sig_handler) # keep collecting dataset tornado.ioloop.PeriodicCallback(db_store.collect, options.sync_period).start() # db_conn2.collect() tornado.ioloop.IOLoop.instance().start()
def checkFileDir():
    """Ensure options.fileDir exists, is a directory, and is writable.

    Creates the directory if missing, then round-trips a small JSON test
    file and verifies the data read back matches what was written.
    Raises ValueError on any failure; removes the test file on success.
    """
    if not os.path.isdir(options.fileDir):
        if os.path.exists(options.fileDir):
            raise ValueError("Path %s is not a valid directory" % options.fileDir)
        os.mkdir(options.fileDir)
    testFile = os.path.join(options.fileDir, "write_test_file.txt")
    testText = json.dumps(options.as_dict(), indent=4)
    # Text mode ("w"/"r") instead of binary: json.dumps returns str, and
    # writing str to a "wb" handle raises TypeError on Python 3.
    with open(testFile, "w") as f:
        f.write(testText)
    with open(testFile, "r") as f:
        data = f.read()
    if data != testText:
        raise ValueError("Write data and read data not match.")
    os.remove(testFile)
def parse_config_file():
    """Define all tornado options, with defaults read from the ini file.

    NOTE(review): this shadows tornado.options.parse_config_file if that
    name was imported at module level — confirm intended.
    """
    config = configparser.ConfigParser()
    config.read(options.config)
    define("port", default=int(config.get('Main', 'port', fallback=8888)),
           type=int, help='port on with to run http server')
    define('storage-path',
           default=config.get('Main', 'storage_path', fallback='/tmp/hosting_app'),
           help='path where to store all the folders and files')
    define(
        'locking',
        default=True,
        type=bool,
        help=
        'Enable this option to prevent system from conflicts with multiple uploads from multiple users'
        'When the file is uploaded it is stored not in the final location, but in the /tmp dir,'
        'in case if peed disconnects or something goes wrong the file simply deleted with no damage to the system.'
        'It also hiding when doing "GET folder" requests. Imagine that you upload big file to the /a/b/c/d/huge.file'
        'Without locking one could simply create a tiny file in /a/b and this would conflict with huge file when'
        'upload is finished, so huge file will be simply discarded. To prevent this behavior enable locking'
    )
    define(
        'file-size-limit',
        default=int(config.get('Main', 'file_size_limit', fallback=4096)),
        type=int,
        help='Upper limit for files Xchange in MB. '
        'Can be bigger then amount of RAM on your machine '
        'because files are not buffered entirely before processing but instead processed by chunks up to 16 KB'
    )
    define('db_file', default=config.get('Database', 'db_file'),
           help='Full path to sqlite3 db file')
    define(
        'sqlite_closure_table_so',
        default=config.get('Database', 'sqlite_closure_table_so'),
        help=''
        'Path to compiled shared library '
        'for sqlite transitive closure table. '
        'To obtain this file perform the following actions:\n'
        '$ git clone https://gist.github.com/coleifer/7f3593c5c2a645913b92 closure\n'
        '$ cd closure/\n'
        '$ gcc -g -fPIC -shared closure.c -o closure.so')
    # Forward any log* settings from the [Logging] section onto options
    for logging_option in options.as_dict().keys():
        if logging_option.startswith(
                'log') and logging_option in config['Logging']:
            options.__setattr__(logging_option, config['Logging'][logging_option])
def init_app_settings():
    """Build the application settings dict from config file + command line.

    Command-line arguments take priority and override values from the
    configuration file. The result is cached in the module-level
    ``_app_settings`` and returned as-is on subsequent calls.
    """
    global _app_settings
    if _app_settings:
        return _app_settings
    # First pass only to obtain options.config; parsed again below.
    options.parse_command_line(final=False)
    config_file_path = options.config
    if config_file_path:
        options.parse_config_file(config_file_path, final=False)
    # Re-parse so the command line overrides the config file values.
    options.parse_command_line(final=False)
    options.run_parse_callbacks()
    _app_settings = options.as_dict()
    return _app_settings
def main(): """ Main entry point for my service. :return: """ # pylint: disable=global-statement global APISERVER config.define_options() # Attempt to load config from config file try: parse_config_file("server.conf") except IOError: errmsg = ("{} doesn't exist or couldn't be opened. Using defaults." .format(options.conf_file_path)) logging.warn(errmsg) logging.info(options.as_dict()) platform = Platform.factory(options.hadoop_distro) endpoints = platform.discover(options) if not endpoints: logging.error("Failed to discover API endpoints of cluster") db_store = HDBDataStore(endpoints['HDFS'].geturl(), endpoints['HBASE'].geturl(), options.thrift_port, options.datasets_table, options.data_repo) routes = get_routes(dataservice) logging.info("Service Routes %s", routes) settings = dict() APISERVER = tornado.httpserver.HTTPServer( Application(routes=routes, settings=settings, db_conn=db_store)) for port in options.ports: try: logging.debug("Attempting to bind for dataset dataset on port:%d and address %s", port, options.bind_address) APISERVER.listen(port, options.bind_address) logging.info("Awesomeness is listening on:%s", port) break except socket.error: logging.warn("Not able to bind on port:%d", port) else: logging.warn("No free port available to bind dataset") signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGINT, sig_handler) # keep collecting dataset tornado.ioloop.PeriodicCallback(db_store.collect, options.sync_period).start() # db_conn2.collect() tornado.ioloop.IOLoop.instance().start()
def __init__(self, ping=None, bool_gzip=True):
    '''Ping middleware.

    :param ping: payload to send as the ping; defaults to the current
        13-digit millisecond timestamp
    :param bool_gzip: whether to compress the ping; defaults to True
    '''
    # Fixed: the original tested hasattr(user_infos, 'PingMiddleware'),
    # which checks for an *attribute* — on a mapping it is always False,
    # so the shared per-middleware store was recreated (wiped) on every
    # construction. Test for the key instead.
    if 'PingMiddleware' not in user_infos:
        user_infos['PingMiddleware'] = {}
    self.user_infos = user_infos['PingMiddleware']
    # NOTE(review): "bool_gizp" looks like a typo of "bool_gzip", but it
    # is a public attribute name — left unchanged; confirm readers first.
    self.bool_gizp = bool_gzip
    if not ping:
        self.ping = round(time.time() * 1000)
    else:
        self.ping = ping
    # Ping interval in seconds; 0 disables the heartbeat ping task
    self.interval = options.as_dict().get('BEAT_PING_INTERVAL', 0)
    if self.interval:
        heartbeat.register(self.beat_ping, self.interval)
def parse_command_line(args=None, options_dict=None):
    """Apply '--name=value' command line arguments onto `options`.

    Stops at the first argument that does not start with '-'; only names
    already present in options_dict are applied, and only when the value
    actually differs from the current one.
    """
    if args is None:
        args = sys.argv
    if options_dict is None:
        options_dict = options.as_dict()
    for i in range(1, len(args)):
        # All things after the last option are command line arguments
        if not args[i].startswith("-"):
            break
        arg = args[i].lstrip("-")
        name, _, value = arg.partition("=")
        # `in` replaces dict.has_key(), which was removed in Python 3
        if name in options_dict and options_dict.get(name) != value:
            setattr(options, name, value)
def parse_command_line(args=None, options_dict=None):
    """Apply '--name=value' command line arguments onto `options`.

    Stops at the first argument that does not start with '-'; only names
    already present in options_dict are applied, and only when the value
    actually differs from the current one.
    """
    if args is None:
        args = sys.argv
    if options_dict is None:
        options_dict = options.as_dict()
    for i in range(1, len(args)):
        # All things after the last option are command line arguments
        if not args[i].startswith("-"):
            break
        arg = args[i].lstrip("-")
        name, _, value = arg.partition("=")
        # `in` replaces dict.has_key(), which was removed in Python 3
        if name in options_dict and options_dict.get(name) != value:
            setattr(options, name, value)
def start_service():
    """Parse options, validate required ones, and run the Pinterest clone.

    Returns 2 when a required option is missing; otherwise blocks in the
    IOLoop until interrupted, then shuts down cleanly.
    """
    global Log
    options.parse_command_line()
    Log = logging.getLogger()
    if options.debug:
        Log.setLevel(logging.DEBUG)
    required_options = ('port', 'fe_path', 'db_host', 'db_port', 'db_name',
                        'db_user', 'db_pw')
    for opt in required_options:
        if getattr(options, opt) is None:
            Log.error('Missing {} parameter'.format(opt))
            options.print_help()
            return 2
    Log.info('Starting Pinterest clone app')
    # .items() replaces the Python-2-only .iteritems() (removed in Py3)
    for opt, val in options.as_dict().items():
        Log.info(' {}={}'.format(opt, val))
    # enable clean shutdown via kill
    signal.signal(signal.SIGTERM, stopService)
    ioloop.IOLoop.instance().run_sync(init_db)
    service = PinterestAPI(options.fe_path, Log)
    if options.certfile and options.keyfile:
        # TLS termination in-process when both cert and key are configured
        http_server = httpserver.HTTPServer(
            service,
            ssl_options={'keyfile': options.keyfile,
                         'certfile': options.certfile})
    else:
        http_server = httpserver.HTTPServer(service, xheaders=True)
    http_server.listen(options.port)
    try:
        ioloop.IOLoop.instance().start()
    except KeyboardInterrupt:
        pass
    stopService(signal.SIGINT, None)
def main() -> None:
    """Parse the command line, wire the ARI client and pinger, run the loop."""
    parse_command_line()
    io_loop = IOLoop.instance()
    if options.debug:
        # Restart the process automatically when source files change
        tornado.autoreload.start()
    for option, value in sorted(options.as_dict().items()):
        gen_log.debug(f"Option: {option}: {value}")
    ari_client = AriClient()
    io_loop.add_callback(ari_client.run)
    if options.ping_interval:
        # ping_interval is in seconds; PeriodicCallback wants milliseconds
        ping_periodic_callback = PeriodicCallback(do_ping,
                                                  options.ping_interval * 1000)
        ping_periodic_callback.start()
    io_loop.start()
def main():
    """Load config files, define late options, build and start the web app."""
    # final=False so later parse calls can still contribute values
    parse_command_line(final=False)
    parse_config_file(os.path.join(options.config_dir, "friendfeed.cfg"), final=False)
    parse_config_file(os.path.join(options.config_dir, "secrets.cfg"), final=False)
    define("xsrf_cookies", default=True)
    # NOTE(review): the Motor client is created eagerly as the option
    # default, i.e. at define() time — confirm that is intended.
    define("db", default=getattr(motor.motor_tornado.MotorClient(),
                                 options.database_collection))
    if options.debug:
        enable_pretty_logging()
    # print(options.as_dict())
    app = tornado.web.Application(
        [
            url(r"/", handlers.MainHandler),
            url(r"/test", TestHandler),
            # twitter
            url(r"/login/twitter", twitter.TwitterLogin, name="twitter_login"),
            url(r"/logout/twitter", twitter.TwitterLogout, name="twitter_logout"),
            url(r"/feed/twitter", twitter.TwitterFeedHandler, name="twitter_feed"),
            url(r"/stream/twitter", twitter.TwitterStreamHandler, name="twitter_stream"),
            url(r"/test/twitter", twitter.TwitterStreamTest),
            # facebook
        ],
        **options.as_dict())
    app.listen(options.port, options.ip)
def run():
    """Parse options, locate a config file, wire signals, start the Reactor."""
    options.parse_command_line()
    opts = options.as_dict()
    if not opts.get('config', None):
        # Fall back to the default path; a config file is mandatory.
        if os.path.isfile(DEFAULT_CONFIG_PATH):
            opts['config'] = DEFAULT_CONFIG_PATH
        else:
            LOGGER.error("Config file is required.")
            print_help()
            sys.exit(1)
    reactor = Reactor(**opts)

    def stop(*args):
        reactor.stop()

    def reinit(*args):
        reactor.reinit()

    signal.signal(signal.SIGTERM, stop)
    signal.signal(signal.SIGINT, stop)
    # SIGHUP is unavailable on Windows
    if hasattr(signal, 'SIGHUP'):
        signal.signal(signal.SIGHUP, reinit)
    reactor.start()
def main():
    """Start one of three proxy modes (http / socks5 / shadow) from options."""
    options.parse_command_line()
    if options.config:
        options.parse_config_file(options.config)
    if options.proxy == "http":
        app = tornado.web.Application([
            (r'.*', SSHttpProxyHandler),
        ])
        app.listen(options.port)
    elif options.proxy == "socks5":
        server = SSSocksProxy(Socks5Channel)
        server.listen(options.port)
    elif options.proxy == "shadow":
        # shadow mode requires server, port and method to be configured
        if not options.shadow_server or not options.shadow_port or \
                not options.shadow_method:
            logging.error("shadow options is not correct")
            sys.exit(2)
        server = SSSocksProxy(ShadowChannel, config=options.as_dict())
        server.listen(options.port)
    logging.info("Starting proxy %s:%d", options.proxy, options.port)
    loop = tornado.ioloop.IOLoop.instance()
    loop.start()
    # (tail of a handler method whose definition precedes this chunk)
    return


def make_app(settings):
    """Build the prediction-service Application with the given settings."""
    return tornado.web.Application([
        (r"/register", RegisterAppClient),
        (r"/prediction/twitter", AddTwitterPrediction),
        (r"/prediction/twitter/wager/(.*)", AddTwitterPredictionWager),
        (r"/prediction/twitter/comment/(.*)", AddTwitterPredictionComment),
        (r"/prediction/twitter/(.*)", ShowTwitterPrediction),
        (r"/user/withwagers/(.*)", ShowUserProfileWithWagers),
        (r"/user/onlyundecided/(.*)", ShowUserProfileOnlyUndecided),
        (r"/user/(.*)", ShowUserProfile),
        (r"/confirm/twitter/ask", TwitterConfirms.AskTwitterPrediction),
        (r"/confirm/twitter/confirm", TwitterConfirms.ConfirmTwitterPrediction),
        # catch-all must stay last so the specific routes above win
        (r"(.*)", MainHandler),
        #(r"/testPost/(.*)", TwitterTestPoster),
    ], **settings)


if __name__ == "__main__":
    # Read DB credentials from the config file and open one shared
    # PyMySQL connection, exposed to handlers via the "connection" option.
    lib.util.parse_config_file("config.conf")
    server = options.mysql["server"]
    user = options.mysql["user"]
    password = options.mysql["password"]
    database = options.mysql["database"]
    conn = pymysql.connect(host=server, user=user, password=password,
                           db=database, cursorclass=pymysql.cursors.DictCursor,
                           charset='utf8')
    define("connection", conn)
    app = make_app(options.as_dict())
    app.listen(8080)
    tornado.ioloop.IOLoop.current().start()
import tornado.web import settings import email_sender import urls import logging import datetime import traceback from tornado.options import options if __name__ == "__main__": # Load settings settings.load_settings() application = tornado.web.Application(urls.urls, **options.as_dict()) application.listen(options.port) server_instance = tornado.ioloop.IOLoop.instance() # tornado.autoreload.add_reload_hook(database.release) try: server_instance.start() except KeyboardInterrupt: logging.error("Existing") exit_error = u'Keyboard Exit' except Exception, e: logging.exception(e) exit_error = traceback.format_exec() finally: server_instance.add_callback(server_instance.stop) exit_error = str(datetime.datetime.now()) + '\n' + exit_error
def get(self):
    """Render index.html, exposing every command line option to the template."""
    self.render("index.html", **options.as_dict())
def log_config():
    """Logs the config used to start the application """
    logging.info('Service will be started with such settings:')
    # Iterate key/value pairs directly instead of re-calling
    # options.as_dict() for every single key as the original did.
    for name, value in options.as_dict().items():
        logging.info('{}="{}"'.format(name, value))
def log_config():
    """Logs the config used to start the application"""
    # .items() instead of the Python-2-only .iteritems() (removed in Py3);
    # sorting keeps the output deterministic.
    conf = '\n'.join(
        ['{}="{}"'.format(k, v) for k, v in sorted(options.as_dict().items())])
    logging.info('Service started with the following settings:\n' + conf)
def checkFileDir():
    """Validate that options.fileDir exists, is a directory, and supports
    a read/write round trip of a small JSON test file.

    NOTE(review): writes a str through a "wb"/"rb" handle — Python 2
    only; confirm before running under Python 3.
    """
    if not os.path.isdir(options.fileDir):
        if os.path.exists(options.fileDir):
            raise ValueError("Path %s is not a valid directory" % options.fileDir)
        os.mkdir(options.fileDir)
    testFile = os.path.join(options.fileDir, "write_test_file.txt")
    testText = json.dumps(options.as_dict(), indent = 4)
    with open(testFile, "wb") as f:
        f.write(testText)
    with open(testFile, "rb") as f:
        data = f.read()
    if data != testText:
        raise ValueError("Write data and read data not match.")
    os.remove(testFile)

# Self-checks executed at import time
testList = []
testList.append(checkSeparator)
testList.append(checkFileDir)
for func in testList:
    func()

# Expose every option as a module-level global (import-time side effect)
globals().update(options.as_dict())

def show():
    """Print options, settings and version (Python 2 print statements)."""
    print "options = ", json.dumps(options.as_dict(), indent = 4)
    print "settings = ", json.dumps(settings, indent = 4)
    print "version = %s" % version

if __name__ == '__main__':
    show()
import os
import subprocess
import logging

from tornado.options import define, options, parse_command_line

from backend import server


def build_ui():
    """Build the front-end assets using the `npm` toolchain.

    Fixed: the working directory is now restored in a finally block, so
    a failing npm invocation no longer leaves the process chdir'ed into
    the ui/ directory.
    """
    previous_dir = os.getcwd()
    os.chdir(os.path.join(os.path.dirname(__file__), "ui"))
    try:
        logging.info("Building UI assets")
        subprocess.call(["npm", "run", "build"])
    finally:
        os.chdir(previous_dir)


define("port", default=8080, help="run on the given port", type=int)
define("debug", default=False, help="turn on debugging", type=bool)
define("skip-build", default=False, help="skip rebuilding UI", type=bool)

if __name__ == '__main__':
    parse_command_line()
    # tornado normalizes the "skip-build" option to the attribute skip_build
    if not options.skip_build:
        build_ui()
    server.run(**options.as_dict())
def show():
    """Print options, settings and version (Python 2 print statements)."""
    print "options = ", json.dumps(options.as_dict(), indent = 4)
    print "settings = ", json.dumps(settings, indent = 4)
    print "version = %s" % version
            # (tail of an Application __init__ whose start precedes this chunk)
            login_url = "/auth/signin",
            cookie_secret = options.secret_key,
            xsrf_cookie = True
        )
        self.database = DatabaseManager(options.db_host, options.db_user,
                                        options.db_pass, options.db_dbbs,
                                        charset = "cp1250")


def validate_option(dict, key):
    """Return True when `key` is present in `dict` with a non-None value.

    NOTE(review): the parameter name `dict` shadows the builtin.
    """
    try:
        value = dict[key]
        if value == None:
            return False
    except KeyError:
        return False
    return True


if __name__ == "__main__":
    options.parse_command_line()
    if options.config is not None:
        options.parse_config_file(options.config)
    # required application configuration (database connection settings)
    opt = options.as_dict()
    assert validate_option(opt, "db_host")
    assert validate_option(opt, "db_user")
    assert validate_option(opt, "db_pass")
    assert validate_option(opt, "db_dbbs")
    app = ClassRegisterApplication()
    app.listen(options.port)
    IOLoop.instance().start()
def initialize(self):
    """Auth by cert: load the set of valid DNs from the configured file."""
    dn_file = options.as_dict()["dn_filename"]
    self.validDNs = getValidDNs_from_file(dn_file)