def define_logging_options(options=None):
    """Add logging-related flags to ``options``.

    These options are present automatically on the default options
    instance; this function is only necessary if you have created your
    own `.OptionParser`.

    :param options: the option parser to add flags to; when ``None`` the
        global ``tornado.options.options`` instance is used.
    """
    if options is None:
        # late import to prevent cycle
        from tornado.options import options
    options.define("logging", default="info",
                   help=("Set the Python log level. If 'none', tornado won't touch the "
                         "logging configuration."),
                   metavar="debug|info|warning|error|none")
    options.define("log_to_stderr", type=bool, default=None,
                   help=("Send log output to stderr (colorized if possible). "
                         "By default use stderr if --log_file_prefix is not set and "
                         "no other logging is configured."))
    options.define("log_file_prefix", type=str, default=None, metavar="PATH",
                   help=("Path prefix for log files. "
                         "Note that if you are running multiple tornado processes, "
                         "log_file_prefix must be different for each of them (e.g. "
                         "include the port number)"))
    options.define("log_file_max_size", type=int, default=100 * 1000 * 1000,
                   help="max size of log files before rollover")
    options.define("log_file_num_backups", type=int, default=10,
                   help="number of log files to keep")
    options.define("log_formatter", type=str, default="tornado.log.LogFormatter",
                   help="log formatter class, by default use tornado.log.LogFormatter")
    # BUG FIX: bind the *given* options object.  Registering the bare
    # ``enable_pretty_logging`` would configure logging from the global
    # default options, silently ignoring a custom OptionParser.
    options.add_parse_callback(lambda: enable_pretty_logging(options))
def define_logging_options(options=None):
    """Register tornado's logging command-line flags on ``options``.

    The default global options instance already carries these flags;
    call this only when you have built your own `.OptionParser`.

    .. versionadded:: 4.2
       This function existed in prior versions but was broken and
       undocumented until 4.2.
    """
    if options is None:
        # Imported lazily to avoid a circular import with tornado.options.
        from tornado.options import options
    define = options.define
    define(
        "logging",
        default="info",
        metavar="debug|info|warning|error|none",
        help=("Set the Python log level. If 'none', tornado won't touch the "
              "logging configuration."),
    )
    define(
        "log_to_stderr",
        type=bool,
        default=None,
        help=("Send log output to stderr (colorized if possible). "
              "By default use stderr if --log_file_prefix is not set and "
              "no other logging is configured."),
    )
    define(
        "log_file_prefix",
        type=str,
        default=None,
        metavar="PATH",
        help=("Path prefix for log files. "
              "Note that if you are running multiple tornado processes, "
              "log_file_prefix must be different for each of them (e.g. "
              "include the port number)"),
    )
    define(
        "log_file_max_size",
        type=int,
        default=100 * 1000 * 1000,
        help="max size of log files before rollover",
    )
    define(
        "log_file_num_backups",
        type=int,
        default=10,
        help="number of log files to keep",
    )
    define(
        "log_rotate_when",
        type=str,
        default='midnight',
        help=("specify the type of TimedRotatingFileHandler interval "
              "other options:('S', 'M', 'H', 'D', 'W0'-'W6')"),
    )
    define(
        "log_rotate_interval",
        type=int,
        default=1,
        help="The interval value of timed rotating",
    )
    define(
        "log_rotate_mode",
        type=str,
        default='size',
        help="The mode of rotating files(time or size)",
    )
    # Re-apply pretty logging against this options object after each parse.
    options.add_parse_callback(lambda: enable_pretty_logging(options))
def define_logging_options(options=None):
    """Install tornado's logging flags on an options object.

    The global default options instance already has these flags; calling
    this is only needed for a hand-built `.OptionParser`.

    .. versionadded:: 4.2
       This function existed in prior versions but was broken and
       undocumented until 4.2.
    """
    if options is None:
        # late import to prevent cycle
        from tornado.options import options
    # Flag specifications, registered in order below.
    specs = [
        ("logging",
         dict(default="info",
              help=("Set the Python log level. If 'none', tornado won't touch the "
                    "logging configuration."),
              metavar="debug|info|warning|error|none")),
        ("log_to_stderr",
         dict(type=bool, default=None,
              help=("Send log output to stderr (colorized if possible). "
                    "By default use stderr if --log_file_prefix is not set and "
                    "no other logging is configured."))),
        ("log_file_prefix",
         dict(type=str, default=None, metavar="PATH",
              help=("Path prefix for log files. "
                    "Note that if you are running multiple tornado processes, "
                    "log_file_prefix must be different for each of them (e.g. "
                    "include the port number)"))),
        ("log_file_max_size",
         dict(type=int, default=100 * 1000 * 1000,
              help="max size of log files before rollover")),
        ("log_file_num_backups",
         dict(type=int, default=10, help="number of log files to keep")),
    ]
    for flag_name, flag_kwargs in specs:
        options.define(flag_name, **flag_kwargs)
    # Re-run pretty logging against this options object after every parse.
    options.add_parse_callback(lambda: enable_pretty_logging(options))
def start(prefix, settings, modules, routes, known_exceptions, **kwargs):
    """starts the tornado application.

    :param prefix: the url prefix
    :param settings: the user defined settings (iterable of kwargs dicts
        passed to ``options.define``)
    :param modules: the modules to load
    :param routes: the list of url routes (url, handler)
    :param known_exceptions: the mapping of known exceptions to HTTP codes
    :param kwargs: the tornado application arguments
    """
    from tornado.options import options
    # Built-in flags: optional config file (applied non-finally so the
    # command line can still override it) plus listen port/address.
    options.define(
        "config", type=str, help="path to config file",
        callback=lambda p: options.parse_config_file(p, final=False))
    options.define("port", default=8000, help="listening port", type=int)
    options.define("address", default='127.0.0.1', help="listening address")
    options.add_parse_callback(log.patch_logger)
    loop = _get_event_loop()
    modules_registry = ModulesRegistry(loop.asyncio_loop, log.gen_log)
    for module in modules:
        # Modules are only registered here; actual loading is deferred.
        modules_registry.lazy_load(module, options)
    for opt in settings:
        # Each user-defined setting becomes an additional command-line flag.
        options.define(**opt)
    options.parse_command_line(final=True)
    # Normalize the prefix so route concatenation always has one slash.
    if not prefix.endswith('/'):
        prefix += '/'
    kwargs.update(options.group_dict('application'))
    kwargs.setdefault('default_handler_class', handler.DefaultHandler)
    # prevent override this option
    kwargs['known_exceptions'] = known_exceptions
    kwargs['modules'] = modules_registry
    handlers = []
    for uri, methods in routes:
        log.app_log.info("add resource: %s", uri)
        handlers.append((_concat_url(prefix, uri), compile_handler(methods)))
    app = web.Application(handlers, **kwargs)
    app.listen(options.port, options.address, xheaders=True)
    # Stop the loop on SIGTERM so the graceful-shutdown path below runs.
    signal.signal(signal.SIGTERM, lambda *x: loop.stop())
    log.app_log.info("start listening on %s:%d",
                     options.address, options.port or 80)
    try:
        loop.start()
    except (KeyboardInterrupt, SystemExit):
        pass
    loop.close()
    log.app_log.info("gracefully shutdown.")
def setup_settings(settings_package, pull_options=True,
                   default_settings='base', final=False):
    """Wire a settings package into tornado's option parsing.

    :param settings_package: the package holding the settings modules
    :param pull_options: if True, synchronize values between ``settings``
        and ``options`` after every parse
    :param default_settings: name of the settings module used by default
    :param final: if True, run the parse callbacks immediately
    """
    from tornado.log import enable_pretty_logging
    from tornado.options import options
    options.define('settings', default=default_settings,
                   help='Define settings module')

    def parse_callback():
        # Runs after the command line is parsed: load the selected settings
        # module, then optionally sync values both ways.
        global settings
        settings.load(settings_package, options.settings)
        if pull_options:
            # let's pull options from the settings and vice versa
            for option_name in options:
                # The settings value wins when the option exists there;
                # otherwise the parsed option is pushed into settings.
                src, dst = (settings, options) if option_name in settings else (options, settings)
                setattr(dst, option_name, src[option_name])
        # resets logging configuration
        enable_pretty_logging()

    options.add_parse_callback(callback=parse_callback)
    global settings
    # Record how settings were configured so they can be re-applied later.
    settings.setup_settings = [settings_package, default_settings, final]
    if final:
        options.run_parse_callbacks()
def start(prefix, settings, modules, routes, known_exceptions, **kwargs):
    """Configure and run the tornado application until shutdown.

    :param prefix: the url prefix prepended to every route
    :param settings: user defined settings (iterable of kwargs dicts for
        ``options.define``)
    :param modules: the modules to load
    :param routes: list of (uri, methods) pairs to mount
    :param known_exceptions: the mapping of known exceptions to HTTP codes
    :param kwargs: the tornado application arguments
    """
    from tornado.options import options
    # Built-in flags: optional config file, listen port and address.
    options.define("config", type=str, help="path to config file",
                   callback=lambda p: options.parse_config_file(p, final=False))
    options.define("port", default=8000, help="listening port", type=int)
    options.define("address", default='127.0.0.1', help="listening address")
    options.add_parse_callback(log.patch_logger)

    io_loop = _get_event_loop()
    registry = ModulesRegistry(io_loop.asyncio_loop, log.gen_log)
    for mod in modules:
        registry.lazy_load(mod, options)
    # User settings become additional command-line flags before parsing.
    for option_spec in settings:
        options.define(**option_spec)
    options.parse_command_line(final=True)

    # Routes are always joined against a slash-terminated prefix.
    prefix = prefix if prefix.endswith('/') else prefix + '/'

    kwargs.update(options.group_dict('application'))
    kwargs.setdefault('default_handler_class', handler.DefaultHandler)
    # prevent override this option
    kwargs['known_exceptions'] = known_exceptions
    kwargs['modules'] = registry

    url_specs = []
    for uri, methods in routes:
        log.app_log.info("add resource: %s", uri)
        url_specs.append((_concat_url(prefix, uri), compile_handler(methods)))

    app = web.Application(url_specs, **kwargs)
    app.listen(options.port, options.address, xheaders=True)
    # A SIGTERM stops the loop, falling through to the shutdown path below.
    signal.signal(signal.SIGTERM, lambda *x: io_loop.stop())
    log.app_log.info("start listening on %s:%d",
                     options.address, options.port or 80)
    try:
        io_loop.start()
    except (KeyboardInterrupt, SystemExit):
        pass
    io_loop.close()
    log.app_log.info("gracefully shutdown.")
def main():
    """Entry point: parse flags, build the application and serve forever."""
    # Configure logging now and re-configure after every option re-parse.
    set_log_setting()
    options.add_parse_callback(set_log_setting)
    options.parse_command_line()
    print_server_info()

    web_app = Application()
    listen_sockets = bind_sockets(options.port)
    http_server = HTTPServer(web_app, xheaders=settings.XHEADERS)
    http_server.add_sockets(listen_sockets)
    IOLoop.current().start()
def main():
    """Entry point: configure logging, fork worker processes and serve."""
    # Configure logging immediately and re-apply after option parsing.
    set_log_setting()
    options.add_parse_callback(set_log_setting)
    options.parse_command_line()
    print_server_info()
    application = Application()
    # Bind before forking so every worker process shares the same
    # listening sockets.
    sockets = bind_sockets(options.port)
    tornado.process.fork_processes(0)  # 0 = one worker per CPU core
    server = HTTPServer(application, xheaders=settings.XHEADERS)
    server.add_sockets(sockets)
    IOLoop.current().start()
def define_logging_options(options=None):
    """Define the logging-related command-line flags on ``options``.

    These flags exist on the global default options instance already;
    call this only for a separately constructed `.OptionParser`.

    :param options: the option parser to add flags to; when ``None`` the
        global ``tornado.options.options`` instance is used.
    """
    if options is None:
        # late import to prevent cycle
        from tornado.options import options
    options.define("logging", default="info",
                   help=("Set the Python log level. If 'none', tornado won't touch the "
                         "logging configuration."),
                   metavar="debug|info|warning|error|none")
    options.define("log_to_stderr", type=bool, default=None,
                   help=("Send log output to stderr (colorized if possible). "
                         "By default use stderr if --log_file_prefix is not set and "
                         "no other logging is configured."))
    options.define("log_file_prefix", type=str, default=None, metavar="PATH",
                   help=("Path prefix for log files. "
                         "Note that if you are running multiple tornado processes, "
                         "log_file_prefix must be different for each of them (e.g. "
                         "include the port number)"))
    options.define("log_file_max_size", type=int, default=100 * 1000 * 1000,
                   help="max size of log files before rollover")
    options.define("log_file_num_backups", type=int, default=10,
                   help="number of log files to keep")
    # BUG FIX: wrap the callback in a lambda that passes ``options``.
    # Registering ``enable_pretty_logging`` bare makes it configure the
    # global default options, not a caller-supplied OptionParser.
    options.add_parse_callback(lambda: enable_pretty_logging(options))
stats[key + '_AvgTime'] = [0.0, 0] def _run_rrd(self): # Puts data to RRD-archive if self.rrd: _time = int(time()) _data = [ stats['Agent_Unique'], stats['Interact_Unique'], stats['Interact_On'], #started stats['Interact_AvgTime'][1], #completed stats['Interact_Failed'], #failed stats['Bytes_Period'], int(stats['CPU_time'] * pow(10, 6)), #mcs stats['Interact_AvgTime'][0], #duration stats['Interact_AvgTime'][0] / stats['Interact_AvgTime'][1] if stats['Interact_AvgTime'][1] else 0, ] _values = tuple(imap(str, _data)) self.rrd.bufferValue(_time, *_values) try: self.rrd.update(debug=True) except ExternalCommandError as e: gen_log.error(str(e)) self.rrd.values = [] stats_mon = stats_monitor() options.add_parse_callback(stats_mon.reset)
def _set_timeout_options():
    """Refresh the module-level timeout constants from parsed options."""
    global MN_AGENT_TIMEOUT, MN_AGENT_CACHE_TIMEOUT, MN_CLIENT_TIMEOUT, MN_NO_REPLY_TIMEOUT
    # timedelta(days, seconds): each option value is a number of seconds.
    MN_AGENT_TIMEOUT = timedelta(0, options.timeout_agent)
    MN_AGENT_CACHE_TIMEOUT = timedelta(0, options.timeout_cache)
    MN_CLIENT_TIMEOUT = timedelta(0, options.timeout_client)
    MN_NO_REPLY_TIMEOUT = timedelta(0, options.timeout_no_reply)
    pass


# Module-level registries.  ``interactions`` is keyed by request id (see
# Interaction.__init__); the others presumably follow the same scheme —
# TODO confirm against their writers elsewhere in the file.
interactions = {}
awaiting = {}
awaiting_cache = {}
clients = {}
request_enum = itertools.count()

# Apply defaults now and re-apply whenever the command line is (re)parsed.
_set_timeout_options()
options.add_parse_callback(_set_timeout_options)


class Interaction:
    # Desktop / App interaction
    def __init__(self, client, agent):
        self.client = client
        self.agent = agent
        # One shared request id links the interaction with both endpoints.
        self.RequestID = self.agent.RequestID = self.client.RequestID = getRequestID()
        self.ProductID = client.ProductID
        self._destination = None
        self._source = None
        self._completed = False
        # Register so the interaction can be found by request id.
        interactions[self.RequestID] = self
        if options.stats_enabled:
            stats_mon._stats_on('Interact',self.ProductID)
define("sudo", type=bool, default=False, help="use sudo")


def validate_options():
    # Parse-time validation: a command and a host source are both required.
    if not options.command:
        options.print_help()
        raise Error('missing required command option')
    if not options.hosts and not options.hosts_file:
        options.print_help()
        raise Error('missing required option hosts or hosts-file')


class Error(Exception):
    # Raised for invalid command-line usage.
    pass


if __name__ == '__main__':
    options.logging = 'warn'
    options.add_parse_callback(validate_options)
    options.parse_command_line()
    if not os.path.exists(options.outdir):
        os.mkdir(options.outdir)
    logger = logging.getLogger()
    if options.hosts_file:
        with open(options.hosts_file) as file:
            hosts = [l.strip() for l in file.readlines()]
    # NOTE(review): `hosts` is only bound when --hosts-file is given; when
    # only --hosts is supplied this next line appears to raise NameError —
    # confirm against the full script (this chunk may be truncated).
    client = pssh.ParallelSSHClient(hosts, pool_size=50)
    greenlets = client.exec_command(options.command, sudo=options.sudo)
    host_responses = {}
    for g in greenlets:
        response = client.get_stdout(g, return_buffers=True)
        host = response.keys().pop()
        retval = response[host]['exit_code']
        stderr = [line for line in response[host]['stderr']]
stats[key+'_AvgTime']=[0.0,0] def _run_rrd(self): # Puts data to RRD-archive if self.rrd: _time = int(time()) _data = [stats['Agent_Unique'], stats['Interact_Unique'], stats['Interact_On'], #started stats['Interact_AvgTime'][1], #completed stats['Interact_Failed'], #failed stats['Bytes_Period'], int(stats['CPU_time']*pow(10,6)), #mcs stats['Interact_AvgTime'][0], #duration stats['Interact_AvgTime'][0]/stats['Interact_AvgTime'][1] if stats['Interact_AvgTime'][1] else 0, ] _values = tuple(imap(str,_data)) self.rrd.bufferValue(_time, *_values) try: self.rrd.update(debug=True) except ExternalCommandError as e: gen_log.error(str(e)) self.rrd.values=[] stats_mon = stats_monitor() options.add_parse_callback(stats_mon.reset)
def _set_timeout_options(): global MN_AGENT_TIMEOUT, MN_AGENT_CACHE_TIMEOUT, MN_CLIENT_TIMEOUT, MN_NO_REPLY_TIMEOUT MN_AGENT_TIMEOUT = timedelta(0, options.timeout_agent) MN_AGENT_CACHE_TIMEOUT = timedelta(0, options.timeout_cache) MN_CLIENT_TIMEOUT = timedelta(0, options.timeout_client) MN_NO_REPLY_TIMEOUT = timedelta(0, options.timeout_no_reply) pass interactions = {} awaiting = {} awaiting_cache = {} clients = {} request_enum = itertools.count() _set_timeout_options() options.add_parse_callback(_set_timeout_options) class Interaction: # Desktop / App interaction def __init__(self, client, agent): self.client = client self.agent = agent self.RequestID = self.agent.RequestID = self.client.RequestID = getRequestID( ) self.ProductID = client.ProductID self._destination = None self._source = None self._completed = False interactions[self.RequestID] = self if options.stats_enabled: