Esempio n. 1
0
def setup_settings(settings_package, pull_options=True, default_settings='base', final=False):
    """Register a --settings option and wire settings loading into option parsing.

    :param settings_package: package containing the settings modules
    :param pull_options: if True, synchronize values between the tornado
        options and the loaded settings after parsing
    :param default_settings: name of the settings module used by default
    :param final: if True, run the registered parse callbacks immediately
    """
    from tornado.log import enable_pretty_logging
    from tornado.options import options

    options.define('settings', default=default_settings, help='Define settings module')

    def parse_callback():
        # 'settings' is a module-level object (not visible in this chunk)
        # exposing load() plus mapping-style and attribute access.
        global settings
        settings.load(settings_package, options.settings)

        if pull_options:
            # let's pull options from the settings and vice versa
            for option_name in options:
                # copy from whichever side already carries the name
                src, dst = (settings, options) if option_name in settings else (options, settings)
                setattr(dst, option_name, src[option_name])
            # resets logging configuration
            enable_pretty_logging()

    options.add_parse_callback(callback=parse_callback)

    global settings
    # remember how setup was invoked so it can be inspected/replayed later
    settings.setup_settings = [settings_package, default_settings, final]

    if final:
        options.run_parse_callbacks()
Esempio n. 2
0
def define_logging_options(options=None):
    """Add logging-related flags to ``options``.

    These options are present automatically on the default options instance;
    this method is only necessary if you have created your own `.OptionParser`.

    .. versionadded:: 4.2
        This function existed in prior versions but was broken and undocumented until 4.2.
    """
    if options is None:
        # late import to prevent cycle
        from tornado.options import options

    # Declare each flag from a single table; order matters for --help output.
    flag_specs = [
        dict(name="logging", default="info",
             help=("Set the Python log level. If 'none', tornado won't touch the "
                   "logging configuration."),
             metavar="debug|info|warning|error|none"),
        dict(name="log_to_stderr", type=bool, default=None,
             help=("Send log output to stderr (colorized if possible). "
                   "By default use stderr if --log_file_prefix is not set and "
                   "no other logging is configured.")),
        dict(name="log_file_prefix", type=str, default=None, metavar="PATH",
             help=("Path prefix for log files. "
                   "Note that if you are running multiple tornado processes, "
                   "log_file_prefix must be different for each of them (e.g. "
                   "include the port number)")),
        dict(name="log_file_max_size", type=int, default=100 * 1000 * 1000,
             help="max size of log files before rollover"),
        dict(name="log_file_num_backups", type=int, default=10,
             help="number of log files to keep"),
    ]
    for spec in flag_specs:
        options.define(**spec)

    # Re-apply the logging configuration once the options have been parsed.
    options.add_parse_callback(lambda: enable_pretty_logging(options))
Esempio n. 3
0
def _add_debug(logger):
    """Define a 'debug' tornado option derived from *logger*'s level.

    The option defaults to True when the logger's own level is exactly
    ``logging.DEBUG``; inherited (effective) levels are not considered.
    """
    debug = logger.level == logging.DEBUG
    options.define('debug', default=debug)
Esempio n. 4
0
def main():
  """ Starts a web service for handling datastore requests. """

  global datastore_access
  global server_node
  global zookeeper
  zookeeper_locations = appscale_info.get_zk_locations_string()

  parser = argparse.ArgumentParser()
  parser.add_argument('-t', '--type', choices=dbconstants.VALID_DATASTORES,
                      default=dbconstants.VALID_DATASTORES[0],
                      help='Database type')
  parser.add_argument('-p', '--port', type=int,
                      default=dbconstants.DEFAULT_PORT,
                      help='Datastore server port')
  parser.add_argument('-v', '--verbose', action='store_true',
                      help='Output debug-level logging')
  args = parser.parse_args()

  if args.verbose:
    logging.getLogger('appscale').setLevel(logging.DEBUG)

  # Publish this node's identity via tornado options for other modules.
  options.define('private_ip', appscale_info.get_private_ip())
  options.define('port', args.port)
  taskqueue_locations = get_load_balancer_ips()

  # ZooKeeper path under which this server registers itself.
  server_node = '{}/{}:{}'.format(DATASTORE_SERVERS_NODE, options.private_ip,
                                  options.port)

  datastore_batch = DatastoreFactory.getDatastore(
    args.type, log_level=logger.getEffectiveLevel())
  zookeeper = zktransaction.ZKTransaction(
    host=zookeeper_locations, db_access=datastore_batch,
    log_level=logger.getEffectiveLevel())

  zookeeper.handle.add_listener(zk_state_listener)
  zookeeper.handle.ensure_path(DATASTORE_SERVERS_NODE)
  # Since the client was started before adding the listener, make sure the
  # server node gets created.
  zk_state_listener(zookeeper.handle.state)
  zookeeper.handle.ChildrenWatch(DATASTORE_SERVERS_NODE, update_servers_watch)

  transaction_manager = TransactionManager(zookeeper.handle)
  datastore_access = DatastoreDistributed(
    datastore_batch, transaction_manager, zookeeper=zookeeper,
    log_level=logger.getEffectiveLevel(),
    taskqueue_locations=taskqueue_locations)
  index_manager = IndexManager(zookeeper.handle, datastore_access,
                               perform_admin=True)
  datastore_access.index_manager = index_manager

  server = tornado.httpserver.HTTPServer(pb_application)
  server.listen(args.port)

  # Blocks until the IOLoop is stopped.
  IOLoop.current().start()
Esempio n. 5
0
def getTornadoUrl():
    """Return this host's Tornado base URL, reading the port from the
    domoweb config file (exits the process if the file is missing)."""
    import socket
    from tornado.options import options

    host_ip = socket.gethostbyname(socket.gethostname())

    # Check config file
    config_path = '/etc/domoweb.cfg'
    if not os.path.isfile(config_path):
        sys.stderr.write("Error: Can't find the file '%s'\n" % config_path)
        sys.exit(1)

    options.define("port", default=40404, help="Launch on the given port", type=int)
    options.parse_config_file(config_path)

    return "http://%s:%s/" % (host_ip, options.port)
Esempio n. 6
0
def main():
    """Parse options, watch the configured paths, and serve the live-reload app.

    Fix: the HTTP server now listens on the --port option instead of a
    hard-coded 8888; the option was previously defined but ignored.
    """
    options.define("port", default="8888")
    options.define("watch", type=str, multiple=True, default=".",
                   help="watch file or directory")
    options.define("htdoc", type=str, default=".", help="root directory of HTML documents")
    options.define("command", type=str, multiple=True, metavar="COMMAND",
                   help="run COMMAND when file or directory is changed")
    options.parse_command_line()

    mypath = os.path.dirname(os.path.abspath(__file__))
    assets_path = os.path.join(mypath, 'assets')

    for f in options.watch:
        watcher.watch(f)

    watcher.add_hook(_run_cmd)
    watcher.start()

    application = tornado.web.Application([
        (r"/ws", WebSocketHandler),
        (r"/assets/(.*)", tornado.web.StaticFileHandler, {"path": assets_path}),
        (r"/(.*\.html)", MainHandler),
        (r"/(.*)", tornado.web.StaticFileHandler, {"path": options.htdoc}),
    ])
    # honor --port; int() because the option's default (and thus type) is str
    application.listen(int(options.port))

    logging.info('starting application')
    try:
        tornado.ioloop.IOLoop.instance().start()
    except KeyboardInterrupt:
        logging.info('bye')
Esempio n. 7
0
def parse_config_file(filename):
    """Rewrite tornado default parse_config_file.

    Parses and loads the Python config file at the given filename.
    This version allows customizing new options which were not defined
    before from a configuration file.

    :param filename: path to a Python-syntax configuration file
    """
    config = {}
    # Use a context manager so the file handle is always closed (the original
    # leaked it until garbage collection).
    with io.open(filename, encoding="UTF-8") as config_file:
        source = config_file.read()
    exec(compile(source.encode("UTF-8"), filename, "exec"), {}, config)

    for name in config:
        if name in options:
            # existing option: assign through the parser so type handling applies
            options[name] = config[name]
        else:
            # unknown name: create a brand-new option on the fly
            options.define(name, config[name])
Esempio n. 8
0
def make_app(secret, is_autoscaler):
  """Build the web.Application, resetting tornado options first.

  :param secret: deployment secret stored as a tornado option
  :param is_autoscaler: whether this node exposes the /instances API
  """
  # Wipe any previously defined options (tornado keeps them globally).
  options.__dict__['_options'].clear()
  options.define('secret', secret)

  factory = InfrastructureAgentFactory()

  if is_autoscaler:
    instances_route = ('/instances', InstancesHandler,
                       {'agent_factory': factory})
  else:
    # Non-autoscaler nodes answer 404 with an explanatory reason.
    instances_route = ('/instances', Respond404Handler,
                       dict(reason='This node was not started as an autoscaler.'))

  return web.Application([
    ('/instance', InstanceHandler, {'agent_factory': factory}),
    instances_route,
  ])
Esempio n. 9
0
def start(prefix, settings, modules, routes, known_exceptions, **kwargs):
    """starts the tornado application.
    :param prefix: the url prefix
    :param settings: the user defined settings (iterable of dicts passed
        to ``options.define``)
    :param modules: the modules to load
    :param routes: the list of url routes (url, methods)
    :param known_exceptions: the mapping of known exceptions to HTTP codes
    :param kwargs: the tornado application arguments
    """
    from tornado.options import options

    # --config triggers an immediate (non-final) config-file parse so that
    # command-line flags can still override file values afterwards.
    options.define("config", type=str, help="path to config file",
                   callback=lambda p: options.parse_config_file(p, final=False))
    options.define("port", default=8000, help="listening port", type=int)
    options.define("address", default='127.0.0.1', help="listening address")

    options.add_parse_callback(log.patch_logger)

    loop = _get_event_loop()
    modules_registry = ModulesRegistry(loop.asyncio_loop, log.gen_log)

    for module in modules:
        modules_registry.lazy_load(module, options)

    # user-defined options must be declared before the final parse below
    for opt in settings:
        options.define(**opt)

    options.parse_command_line(final=True)

    if not prefix.endswith('/'):
        prefix += '/'

    kwargs.update(options.group_dict('application'))
    kwargs.setdefault('default_handler_class', handler.DefaultHandler)
    # prevent override this option
    kwargs['known_exceptions'] = known_exceptions
    kwargs['modules'] = modules_registry

    handlers = []
    for uri, methods in routes:
        log.app_log.info("add resource: %s", uri)
        handlers.append((_concat_url(prefix, uri), compile_handler(methods)))

    app = web.Application(handlers, **kwargs)
    app.listen(options.port, options.address, xheaders=True)

    # SIGTERM stops the loop so the graceful-shutdown path below is reached
    signal.signal(signal.SIGTERM, lambda *x: loop.stop())
    log.app_log.info("start listening on %s:%d", options.address, options.port or 80)

    try:
        loop.start()
    except (KeyboardInterrupt, SystemExit):
        pass

    loop.close()
    log.app_log.info("gracefully shutdown.")
Esempio n. 10
0
def main():
    """Parse command-line options and start the forwarding server.

    Expects exactly one positional argument: the configuration file path.

    :raises ValueError: if the configuration file is not given
    """
    options.define('certfile', help="Path to SSL certificate to enable TLS")
    options.define('keyfile', help="Path to SSL key to enable TLS")
    unparsed = options.parse_command_line()
    if len(unparsed) == 1:
        # Fix: reuse the already-parsed remainder instead of calling
        # parse_command_line() a second time (which re-ran all callbacks).
        config_file = unparsed[0]
    else:
        raise ValueError("Configuration file is not specified")
    if options.certfile and options.keyfile:
        # both halves of the keypair present: serve over TLS
        ssl_options = {
            "certfile": options.certfile,
            "keyfile": options.keyfile,
        }
    else:
        ssl_options = None
    server = ForwardServer(ssl_options=ssl_options)
    server.bind_from_config_file(config_file)
    IOLoop.instance().start()
Esempio n. 11
0
    def get(self, path=None):
        """main page loader
        if the path doesnt exists, it is set index
        if path isnt toc, toc is loaded
        if path isnt index, index is loaded
        """
        page_content = ''
        ajax = self.is_ajax()
        data = False
        theme = self.get_argument('theme', None)

        if theme is not None:
            # per-request theme override via ?theme=...
            options.define('theme', default=theme, mutable=True)

        if path is None:
            path = 'index'

        if options.allow_data:
            data = self.is_data()

        path = slug_to_name(path)

        if ajax is False or data is False:
            # NOTE(review): the tuples unpacked here are overwritten below and
            # never used — presumably parse_page() has side effects (caching?);
            # confirm before simplifying.
            if path is None or str(path).lower() != 'toc':
                title, slug, date, template, content= self.parse_page('toc')

            if path is None or str(path).lower() != 'index':
                title, slug, date, template, content = self.parse_page('index')


        title, slug, date, template, content = self.parse_page(path)
        page_content += content

        if ajax is True or data is True:
            # machine-readable response for ajax/data clients
            self.write({
                'title': title,
                'slug': slug,
                'date': date,
                'path': '/' + path,
                'content': page_content
            })
        else:
            page_template = self.get_template_page('base')
            self.render(page_template, title=title, page_content=page_content)
Esempio n. 12
0
def main():
  """ Parses CLI args, configures logging/ZooKeeper, and runs Hermes. """
  parser = argparse.ArgumentParser()
  parser.add_argument(
    '-v', '--verbose', action='store_true',
    help='Output debug-level logging')
  args = parser.parse_args()

  logging.basicConfig(format=LOG_FORMAT, level=logging.INFO)
  if args.verbose:
    logging.getLogger().setLevel(logging.DEBUG)

  options.define('secret', appscale_info.get_secret())

  signal.signal(signal.SIGTERM, signal_handler)
  signal.signal(signal.SIGINT, signal_handler)

  # Determine which roles this node plays in the deployment.
  my_ip = appscale_info.get_private_ip()
  is_master = (my_ip == appscale_info.get_headnode_ip())
  is_lb = (my_ip in appscale_info.get_load_balancer_ips())
  is_tq = (my_ip in appscale_info.get_taskqueue_nodes())
  is_db = (my_ip in appscale_info.get_db_ips())

  if is_master:
    global zk_client
    zk_client = KazooClient(
      hosts=','.join(appscale_info.get_zk_node_ips()),
      connection_retry=ZK_PERSISTENT_RECONNECTS)
    zk_client.start()
    # Start watching profiling configs in ZooKeeper
    stats_app.ProfilingManager(zk_client)

  app = tornado.web.Application(
    stats_app.get_local_stats_api_routes(is_lb, is_tq, is_db)
    + stats_app.get_cluster_stats_api_routes(is_master),
    debug=False
  )
  app.listen(constants.HERMES_PORT)

  # Fix: log readiness BEFORE starting the IOLoop — IOLoop.start() blocks,
  # so a message placed after it would only appear at shutdown.
  logger.info("Hermes is up and listening on port: {}."
               .format(constants.HERMES_PORT))

  # Start loop for accepting http requests.
  IOLoop.instance().start()
Esempio n. 13
0
    def get(self, path=None):
        """main page loader
        if the path doesnt exists, it is set index
        if path isnt toc, toc is loaded
        if path isnt index, index is loaded
        """
        page_content = ""
        ajax = self.is_ajax()
        data = False
        preview = False
        theme = self.get_argument("theme", None)

        if theme is not None:
            # per-request theme override via ?theme=...
            options.define("theme", default=theme, mutable=True)

        if path is None:
            path = "index"

        if options.allow_data:
            data = self.is_data()

        if options.allow_preview:
            # preview mode is honored only when enabled by configuration
            preview = self.is_preview()

        path = slug_to_name(path)

        if ajax is False or data is False:
            # NOTE(review): the tuples unpacked here are overwritten below and
            # never used — presumably parse_page() has side effects (caching?);
            # confirm before simplifying.
            if path is None or str(path).lower() != "toc":
                title, slug, date, template, content = self.parse_page("toc")

            if path is None or str(path).lower() != "index":
                title, slug, date, template, content = self.parse_page("index")

        title, slug, date, template, content = self.parse_page(path, is_preview=preview)
        page_content += content

        if ajax is True or data is True:
            # machine-readable response for ajax/data clients
            self.write({"title": title, "slug": slug, "date": date, "path": "/" + path, "content": page_content})
        else:
            page_template = get_template_page("base")
            self.render(page_template, title=title, page_content=page_content)
Esempio n. 14
0
def define_logging_options(options=None):
    """Define tornado's standard logging flags on *options*.

    Falls back to the global tornado options instance when none is supplied.
    """
    if options is None:
        # late import to prevent cycle
        from tornado.options import options

    define = options.define
    define("logging", default="info",
           help=("Set the Python log level. If 'none', tornado won't touch the "
                 "logging configuration."),
           metavar="debug|info|warning|error|none")
    define("log_to_stderr", type=bool, default=None,
           help=("Send log output to stderr (colorized if possible). "
                 "By default use stderr if --log_file_prefix is not set and "
                 "no other logging is configured."))
    define("log_file_prefix", type=str, default=None, metavar="PATH",
           help=("Path prefix for log files. "
                 "Note that if you are running multiple tornado processes, "
                 "log_file_prefix must be different for each of them (e.g. "
                 "include the port number)"))
    define("log_file_max_size", type=int, default=100 * 1000 * 1000,
           help="max size of log files before rollover")
    define("log_file_num_backups", type=int, default=10,
           help="number of log files to keep")

    # Apply the logging configuration once parsing completes.
    options.add_parse_callback(enable_pretty_logging)
Esempio n. 15
0
import asyncio
from tornado import web
from tornado.platform.asyncio import AsyncIOMainLoop
from tornado.options import options, define

from urls import routes
from settings import app_settings, DATA_BASE

from peewee_async import Manager

# Listening port (the help text is Chinese for "port number to listen on").
options.define('port', default=8000, type=int, help="监听的端口号")


class App(web.Application):
    """Tornado application wired with the project routes and settings;
    a peewee-async database manager is attached after construction."""

    def __init__(self) -> None:
        super(App, self).__init__(routes,
                                  default_host=None,
                                  transforms=None,
                                  **app_settings)
        # peewee-async Manager; set later via setup_db_manager().
        self.db_manager = None

    def setup_db_manager(self, db_manager):
        """Attach the peewee-async Manager used for database access."""
        self.db_manager = db_manager


def main() -> None:
    # 必须明确指出 使用 asyncio 事件循环,不知道是不是 只有windows下才需要明确指出
    AsyncIOMainLoop().install()
Esempio n. 16
0
if __name__ == '__main__':

#    domoweb.VERSION = "dev.%s" % commands.getoutput("cd %s ; hg id -n 2>/dev/null" % domoweb.PROJECTPATH)

    # Check log folder
    if not os.path.isdir("/var/log/domoweb"):
        sys.stderr.write("Error: /var/log/domoweb do not exist")
        sys.exit(1)

    # Check config file
    SERVER_CONFIG = '/etc/domoweb.cfg'
    if not os.path.isfile(SERVER_CONFIG):
        sys.stderr.write("Error: Can't find the file '%s'\n" % SERVER_CONFIG)
        sys.exit(1)

    # Declare options before the config file is parsed.
    options.define("sqlite_db", default="/var/lib/domoweb/db.sqlite", help="Database file path", type=str)
    options.define("port", default=40404, help="Launch on the given port", type=int)
    # NOTE(review): "debut" looks like a typo for "debug" (cf. the other
    # domoweb bootstrap which defines "debug") — code reading options.debug
    # would never see this value; confirm before renaming.
    options.define("debut", default=False, help="Debug mode", type=bool)
    options.parse_config_file("/etc/domoweb.cfg")

    logger.info("Running from : %s" % domoweb.PROJECTPATH)

    # Load widget/iconset packs and make sure background folders exist.
    packLoader.loadWidgets(domoweb.PACKSPATH)
    packLoader.loadIconsets(domoweb.PACKSPATH)
    if not os.path.isdir(os.path.join(domoweb.VARPATH, 'backgrounds')):
        os.mkdir(os.path.join(domoweb.VARPATH, 'backgrounds'))
        logger.info("Creating : %s" % os.path.join(domoweb.VARPATH, 'backgrounds'))
    if not os.path.isdir(os.path.join(domoweb.VARPATH, 'backgrounds', 'thumbnails')):
        os.mkdir(os.path.join(domoweb.VARPATH, 'backgrounds', 'thumbnails'))

    mqDataLoader.loadDatatypes()
Esempio n. 17
0
from tornado.gen import Future
from tornado.httpclient import HTTPError
from tornado.options import options
from tornado.testing import AsyncTestCase
from tornado.testing import gen_test

from appscale.common import (file_io, appscale_info, misc, monit_interface,
                             testing)
from appscale.common import monit_app_configuration
from appscale.common.monit_interface import MonitOperator

sys.path.append(os.path.join(os.path.dirname(__file__), "../../"))
import app_manager_server
from app_manager_server import BadConfigurationException

# Pre-define the tornado options that app_manager_server reads at runtime,
# so importing it in tests does not fail on missing options.
options.define('login_ip', '127.0.0.1')
options.define('syslog_server', '127.0.0.1')
options.define('private_ip', '<private_ip>')
options.define('db_proxy', '<private_ip>')
options.define('tq_proxy', '<private_ip>')


class TestAppManager(AsyncTestCase):
    @gen_test
    def test_start_app_badconfig(self):
        # Starting an app with an empty config dict must raise
        # BadConfigurationException.
        testing.disable_logging()

        with self.assertRaises(BadConfigurationException):
            yield app_manager_server.start_app('test', {})

    @gen_test
Esempio n. 18
0
from urllib.parse import urlencode
from tornado.httpclient import AsyncHTTPClient
from tornado.autoreload import watch
from torskel.libs.db_utils.mongo import get_mongo_pool
from torskel.libs.db_utils.mongo import bulk_mongo_insert
from torskel.libs.str_consts import INIT_REDIS_LABEL
from torskel.libs.event_controller import TorskelEventLogController
from torskel.libs.startup import server_init

# Tornado application settings; the security-related entries are disabled here.
settings = {
    # 'cookie_secret': options.secret_key,
    # 'xsrf_cookies': True,
}

# server params
options.define('debug', default=True, help='debug mode', type=bool)
options.define("port", default=8888, help="run on the given port", type=int)
options.define("srv_name", 'LOCAL', help="Server verbose name", type=str)
options.define("run_on_socket", False, help="Run on socket", type=bool)
options.define("socket_path", None, help="Path to unix-socket", type=str)

# xml-rpc
options.define('use_xmlrpc',
               default=False,
               help='use xmlrpc client',
               type=bool)
options.define("max_xmlrpc_clients", default=10, type=int)

# http-client params (timeout presumably in seconds — TODO confirm at usage site)
options.define("max_http_clients", default=100, type=int)
options.define("http_client_timeout", default=30, type=int)
Esempio n. 19
0
__author__ = 'Hermann Schachner'

import logging
import concurrent.futures
import hexdump

from tornado.ioloop import IOLoop
from tornado.web import *
from tornado.options import options

# Server options: listening port and the regex endpoint pattern.
options.define('port', type=int, default=45678, help="Port for mini server to listen at")
options.define('endpoint', default=r"/(\w+)(?:/(\w+))?", help="URL base path of API")

handlers = list()


def handler(method):
    """Register *method*'s name in the module-level ``handlers`` list and
    return a wrapper delegating to it.

    Fix: the wrapper now preserves the wrapped method's metadata
    (``__name__``, docstring) via ``functools.wraps``.
    """
    import functools

    handlers.append(method.__name__)

    @functools.wraps(method)
    def _inner(self, *args):
        method(self, *args)

    return _inner


def fib_gen(n_max):
    """Yield the first *n_max* Fibonacci numbers, starting from 1."""
    prev, curr = 0, 1
    for _ in range(n_max):
        yield curr
        prev, curr = curr, prev + curr
Esempio n. 20
0
    #    domoweb.VERSION = "dev.%s" % commands.getoutput("cd %s ; hg id -n 2>/dev/null" % domoweb.PROJECTPATH)

    # Check log folder
    if not os.path.isdir("/var/log/domoweb"):
        sys.stderr.write("Error: /var/log/domoweb do not exist")
        sys.exit(1)

    # Check config file
    SERVER_CONFIG = '/etc/domoweb.cfg'
    if not os.path.isfile(SERVER_CONFIG):
        sys.stderr.write("Error: Can't find the file '%s'\n" % SERVER_CONFIG)
        sys.exit(1)

    # Declare all server options (http/https ports, REST endpoint, modes)
    # before the configuration file is parsed.
    options.define("sqlite_db",
                   default="/var/lib/domoweb/db.sqlite",
                   help="Database file path",
                   type=str)
    options.define("port",
                   default=40404,
                   help="Launch on the given port (http)",
                   type=int)
    options.define("ssl_port",
                   default=40405,
                   help="Launch on the given port (https)",
                   type=int)
    options.define("debug", default=False, help="Debug mode", type=bool)
    options.define("rest_url",
                   default="http://127.0.0.1:40406/rest",
                   help="RINOR REST Url",
                   type=str)
    options.define("develop", default=False, help="Develop mode", type=bool)
Esempio n. 21
0
def main():
  """ Starts the AdminServer. """
  logging.basicConfig(format=LOG_FORMAT, level=logging.INFO)

  parser = argparse.ArgumentParser(
    prog='appscale-admin', description='Manages AppScale-related processes')
  subparsers = parser.add_subparsers(dest='command')
  subparsers.required = True

  serve_parser = subparsers.add_parser(
    'serve', description='Starts the server that manages AppScale processes')
  serve_parser.add_argument(
    '-p', '--port', type=int, default=constants.DEFAULT_PORT,
    help='The port to listen on')
  serve_parser.add_argument(
    '-v', '--verbose', action='store_true', help='Output debug-level logging')

  subparsers.add_parser(
    'summary', description='Lists AppScale processes running on this machine')

  args = parser.parse_args()
  # 'summary' prints a process table and exits without starting the server.
  if args.command == 'summary':
    table = sorted(list(get_combined_services().items()))
    print(tabulate(table, headers=['Service', 'State']))
    sys.exit(0)

  if args.verbose:
    logger.setLevel(logging.DEBUG)

  # Publish deployment-wide facts as tornado options for handlers to read.
  options.define('secret', appscale_info.get_secret())
  options.define('login_ip', appscale_info.get_login_ip())
  options.define('private_ip', appscale_info.get_private_ip())
  options.define('load_balancers', appscale_info.get_load_balancer_ips())

  acc = appscale_info.get_appcontroller_client()
  ua_client = UAClient(appscale_info.get_db_master_ip(), options.secret)
  zk_client = KazooClient(
    hosts=','.join(appscale_info.get_zk_node_ips()),
    connection_retry=ZK_PERSISTENT_RECONNECTS)
  zk_client.start()
  version_update_lock = zk_client.Lock(constants.VERSION_UPDATE_LOCK_NODE)
  thread_pool = ThreadPoolExecutor(4)
  monit_operator = MonitOperator()
  # Shared resources injected into most request handlers below.
  all_resources = {
    'acc': acc,
    'ua_client': ua_client,
    'zk_client': zk_client,
    'version_update_lock': version_update_lock,
    'thread_pool': thread_pool
  }

  if options.private_ip in appscale_info.get_taskqueue_nodes():
    logger.info('Starting push worker manager')
    GlobalPushWorkerManager(zk_client, monit_operator)

  service_manager = ServiceManager(zk_client)
  service_manager.start()

  app = web.Application([
    ('/oauth/token', OAuthHandler, {'ua_client': ua_client}),
    ('/v1/apps/([a-z0-9-]+)/services/([a-z0-9-]+)/versions', VersionsHandler,
     all_resources),
    ('/v1/projects', ProjectsHandler, all_resources),
    ('/v1/projects/([a-z0-9-]+)', ProjectHandler, all_resources),
    ('/v1/apps/([a-z0-9-]+)/services/([a-z0-9-]+)', ServiceHandler,
     all_resources),
    ('/v1/apps/([a-z0-9-]+)/services/([a-z0-9-]+)/versions/([a-z0-9-]+)',
     VersionHandler, all_resources),
    ('/v1/apps/([a-z0-9-]+)/operations/([a-z0-9-]+)', OperationsHandler,
     {'ua_client': ua_client}),
    ('/api/cron/update', UpdateCronHandler,
     {'acc': acc, 'zk_client': zk_client, 'ua_client': ua_client}),
    ('/api/queue/update', UpdateQueuesHandler,
     {'zk_client': zk_client, 'ua_client': ua_client})
  ])
  logger.info('Starting AdminServer')
  app.listen(args.port)
  # Blocks until the IOLoop is stopped.
  io_loop = IOLoop.current()
  io_loop.start()
Esempio n. 22
0
from PIL import Image
import tornado.gen
import tornado.ioloop
import tornado.iostream
import tornado.tcpserver
import tornado.web
from tornado.options import options

import log
import tornado_h2.http2server as th2
from tornado_h2 import http2_web

logger = logging.getLogger('tornado.application')

# Core server options: bind address/port, HTTPS toggle, and debug mode.
# NOTE(review): os.environ.get('PORT', 8888) yields a str when the env var is
# set, while type=int is declared — confirm tornado coerces/accepts it.
options.define('address',
               default='0.0.0.0',
               help='IP address to attach the server to',
               type=str)
options.define('port',
               default=os.environ.get('PORT', 8888),
               help='Port number to run the server on',
               type=int)
options.define('https',
               default=True,
               help='Start application with HTTPS?',
               type=bool)
options.define('debug',
               default=True,
               help='Start application in debug mode?',
               type=bool)

options.define('image_name',
Esempio n. 23
0
def main():
    candidates = ["bootstrap.ini"]
    if len(sys.argv) > 1:
        candidates.append(sys.argv[1])

    config = ConfigParser.SafeConfigParser()
    config.read(candidates)

    options.define("db_engine", config.get("database", "db_engine"))
    options.define("db_echo", True)

    from trade.models import engine, Currency, Instrument, User, Broker, DepositMethods

    session = scoped_session(sessionmaker(bind=engine))

    for section_name in config.sections():
        if section_name == "currencies":
            for id, currency_json in config.items(section_name):
                c = json.loads(currency_json)

                if Currency.get_currency(session, c[0]):
                    continue
                e = Currency(
                    code=c[0],
                    sign=c[1],
                    description=c[2],
                    is_crypto=c[3],
                    pip=c[4],
                    format_python=c[5],
                    format_js=c[6],
                    human_format_python=c[7],
                    human_format_js=c[8],
                )
                session.add(e)
                session.commit()

        if section_name == "instruments":
            for id, instrument_json in config.items(section_name):
                currency_description = json.loads(instrument_json)

                if Instrument.get_instrument(session, currency_description[0]):
                    continue

                e = Instrument(
                    symbol=currency_description[0],
                    currency=currency_description[1],
                    description=currency_description[2],
                )
                session.add(e)
                session.commit()

        if section_name[:4] == "user":
            if not User.get_user(session, config.get(section_name, "username")):
                broker_id = None
                broker_username = None
                password = base64.b32encode(os.urandom(10))
                transaction_fee_buy = None
                transaction_fee_sell = None
                verified = 0
                is_system = False
                is_staff = False
                is_broker = False
                state = None

                try:
                    broker_id = config.getint(section_name, "broker_id")
                except Exception, e:
                    pass

                try:
                    broker_username = config.get(section_name, "broker_username")
                except Exception, e:
                    pass

                try:
                    password = config.get(section_name, "password")
                except Exception, e:
                    pass

                try:
                    transaction_fee_buy = config.getint(section_name, "transaction_fee_buy")
                except Exception, e:
                    pass
Esempio n. 24
0
def define_logging_options(options=None):
    """Add logging-related flags to ``options``.

    These options are present automatically on the default options instance;
    this method is only necessary if you have created your own `.OptionParser`.

    .. versionadded:: 4.2
        This function existed in prior versions but was broken and undocumented until 4.2.
    """
    if options is None:
        # late import to prevent cycle
        from tornado.options import options
    options.define(
        "logging",
        default="info",
        help=("Set the Python log level. If 'none', tornado won't touch the "
              "logging configuration."),
        metavar="debug|info|warning|error|none")
    options.define(
        "log_to_stderr",
        type=bool,
        default=None,
        help=("Send log output to stderr (colorized if possible). "
              "By default use stderr if --log_file_prefix is not set and "
              "no other logging is configured."))
    options.define(
        "log_file_prefix",
        type=str,
        default=None,
        metavar="PATH",
        help=("Path prefix for log files. "
              "Note that if you are running multiple tornado processes, "
              "log_file_prefix must be different for each of them (e.g. "
              "include the port number)"))
    options.define("log_file_max_size",
                   type=int,
                   default=100 * 1000 * 1000,
                   help="max size of log files before rollover")
    options.define("log_file_num_backups",
                   type=int,
                   default=10,
                   help="number of log files to keep")

    # Extensions over stock tornado: timed-rotation parameters consulted when
    # log_rotate_mode is 'time' instead of the default size-based rollover.
    options.define(
        "log_rotate_when",
        type=str,
        default='midnight',
        help=("specify the type of TimedRotatingFileHandler interval "
              "other options:('S', 'M', 'H', 'D', 'W0'-'W6')"))
    options.define("log_rotate_interval",
                   type=int,
                   default=1,
                   help="The interval value of timed rotating")

    options.define("log_rotate_mode",
                   type=str,
                   default='size',
                   help="The mode of rotating files(time or size)")

    options.add_parse_callback(lambda: enable_pretty_logging(options))
Esempio n. 25
0
from pprint import pprint

from elasticsearch import Elasticsearch
from elasticsearch.exceptions import TransportError
from tornado.options import options, parse_command_line

# CLI flags: Elasticsearch endpoint and the index pattern this script manages.
options.define('host', default="localhost:9200")
options.define('pattern', default="outbreak-resources-*")

if __name__ == "__main__":
    parse_command_line()
    try:
        client = Elasticsearch(options.host)
        client.ingest.put_pipeline(
            'resources-common', {
                "description":
                "compose date field",
                "processors": [{
                    "set": {
                        "field": "_timestamp",
                        "value": "{{_ingest.timestamp}}"
                    }
                }, {
                    "script": {
                        "source":
                        """
                            boolean validDate(def ctx, def field, def now) {
                                if (ctx.containsKey(field)) {
                                    def date;
                                    try {
                                        date = LocalDate.parse(ctx[field], DateTimeFormatter.ISO_LOCAL_DATE);
Esempio n. 26
0
Try:

python -m scripts.add_dataset --help

Test Data:
Git https://github.com/data2health/schemas/blob/master/Dataset/examples/wellderly/wellderly_dataset.json
Raw https://raw.githubusercontent.com/data2health/schemas/master/Dataset/examples/wellderly/wellderly_dataset.json
"""

import logging

from tornado.options import options, parse_command_line

from discovery.registry import datasets

# CLI flags for registering a dataset document.
options.define('url')  # location of the dataset metadata document (required)
options.define('user', default='*****@*****.**')  # NOTE(review): default looks redacted - confirm intended value
options.define('schema', default='ctsa::bts:CTSADataset')  # schema class to validate/register against
options.define('private', default=False, type=bool)  # register the dataset as private

# Quiet the elasticsearch client's verbose INFO-level logging.
logging.getLogger('elasticsearch').setLevel('WARNING')

if __name__ == "__main__":
    # Pick up --url/--user/--schema/--private from the command line.
    parse_command_line()
    assert options.url
    result = datasets.add(
        doc=options.url,
        user=options.user,
        schema=options.schema,
        private=options.private)
    print(result)
Esempio n. 27
0
from tornado import gen
from tornado import iostream
from tornado import web
from tornado.options import define, options
from tornado.log import *
from tornado.concurrent import Future
from tornado.queues import Queue
from tornado.locks import Semaphore, Condition

import config as server_config
from handlers import *
from cache import *
from coroutine_msgbus import *

# Command-line flag: silence tornado's per-request access log when True.
options.define('suppress_access_log',
               default=False,
               help='whether to suppress the access_log of tornado.log')

# AES cipher block size (bytes); payloads must be padded to a multiple of it.
BS = AES.block_size


def pad(s):
    """Zero-pad ``s`` up to the next multiple of the AES block size.

    Strings already on a block boundary are returned unchanged.
    NOTE(review): NUL padding is ambiguous for payloads that legitimately
    end in NULs (unpad() strips those too); kept for wire compatibility.
    """
    # PEP 8 (E731): named callables should be def, not lambda assignments.
    remainder = len(s) % BS
    if remainder == 0:
        return s
    return s + (BS - remainder) * chr(0)


def unpad(s):
    """Strip the trailing NUL padding added by pad()."""
    return s.rstrip(chr(0))


# Connection/heartbeat tuning constants (seconds unless noted).
PENDING_REQ_CNT = 10                      # max queued pending requests
HEARTBEAT_PERIOD_SEC = 60                 # expected heartbeat interval
HEARTBEAT_FAST_PING_DELAY_SEC = 3
HEARTBEAT_NEGATIVE_CHECK_DELAY_SEC = 10
STATISTICS_PERIOD_SEC = 120

class DeviceConnection(object):
Esempio n. 28
0
def main():
    """Start the AppManager: wire ZooKeeper-backed managers together and
    run the instance manager inside the Tornado IOLoop (blocks forever)."""
    file_io.set_logging_format()
    logging.getLogger().setLevel(logging.INFO)

    # Connect to the deployment's ZooKeeper ensemble.
    zk_ips = appscale_info.get_zk_node_ips()
    zk_client = KazooClient(hosts=','.join(zk_ips))
    zk_client.start()

    # Managers that watch ZooKeeper state; they share one thread pool.
    deployment_config = DeploymentConfig(zk_client)
    projects_manager = GlobalProjectsManager(zk_client)
    thread_pool = ThreadPoolExecutor(MAX_BACKGROUND_WORKERS)
    source_manager = SourceManager(zk_client, thread_pool)
    source_manager.configure_automatic_fetch(projects_manager)
    service_operator = ServiceOperator(thread_pool)

    # Expose deployment addresses as global tornado options; defined before
    # any consumer below reads them.
    options.define('private_ip', appscale_info.get_private_ip())
    options.define('syslog_server', appscale_info.get_headnode_ip())
    options.define('db_proxy', appscale_info.get_db_proxy())
    options.define('load_balancer_ip',
                   appscale_info.get_load_balancer_ips()[0])
    options.define('tq_proxy', appscale_info.get_tq_proxy())
    options.define('secret', appscale_info.get_secret())

    routing_client = RoutingClient(zk_client, options.private_ip,
                                   options.secret)
    instance_manager = InstanceManager(zk_client, service_operator,
                                       routing_client, projects_manager,
                                       deployment_config, source_manager,
                                       options.syslog_server, thread_pool,
                                       options.private_ip)
    instance_manager.start()

    logger.info('Starting AppManager')

    # Populate API servers synchronously first, then enter the event loop.
    io_loop = IOLoop.current()
    io_loop.run_sync(instance_manager.populate_api_servers)
    io_loop.start()
Esempio n. 29
0
if __name__ == '__main__':

#    domoweb.VERSION = "dev.%s" % commands.getoutput("cd %s ; hg id -n 2>/dev/null" % domoweb.PROJECTPATH)

    # Check log folder
    if not os.path.isdir("/var/log/domoweb"):
        sys.stderr.write("Error: /var/log/domoweb do not exist")
        sys.exit(1)

    # Check config file
    SERVER_CONFIG = '/etc/domoweb.cfg'
    if not os.path.isfile(SERVER_CONFIG):
        sys.stderr.write("Error: Can't find the file '%s'\n" % SERVER_CONFIG)
        sys.exit(1)

    # Declare all server options, then populate them from the config file
    # (no command-line parsing here: values come from /etc/domoweb.cfg).
    options.define("sqlite_db", default="/var/lib/domoweb/db.sqlite", help="Database file path", type=str)
    options.define("port", default=40404, help="Launch on the given port (http)", type=int)
    options.define("ssl_port", default=40405, help="Launch on the given port (https)", type=int)
    options.define("debug", default=False, help="Debug mode", type=bool)
    options.define("rest_url", default="http://127.0.0.1:40406/rest", help="RINOR REST Url", type=str)
    options.define("develop", default=False, help="Develop mode", type=bool)
    options.define("use_ssl", default=False, help="Use SSL", type=bool)
    options.define("ssl_certificate", default="ssl_cert.pem", help="SSL certificate file path", type=str)
    options.define("ssl_key", default="ssl_key.pem", help="SSL key file path", type=str)
    options.parse_config_file(SERVER_CONFIG)

    logger.info("Running from : %s" % domoweb.PROJECTPATH)

    # Load widget/theme packs; develop mode changes how packs are loaded.
    packLoader.loadWidgets(domoweb.PACKSPATH, options.develop)
    packLoader.loadThemes(domoweb.PACKSPATH, options.develop)
    # TODO
Esempio n. 30
0
        if handler.get_status() < 400:
            log_method = access_log.info
        elif handler.get_status() < 500:
            log_method = access_log.warning
        else:
            log_method = access_log.error
        request_time = 1000.0 * handler.request.request_time()
        source = handler.request.headers.get("source", "unknow")
        origin_ip = handler.request.headers.get("origin_ip", "unknow")

        log_method("%d %s %.2fms %s %s", handler.get_status(),
                   handler._request_summary(), request_time, source, origin_ip)


if __name__ == "__main__":
    # Server flags. NOTE(review): 'port' defaults to a *string*, which is
    # why it is cast with int() before binding below.
    options.define(name="config", default="dev")
    options.define(name="port", default="22345")
    options.define(name="process", default=1)
    options.define(name="name", default="default")

    options.parse_command_line()
    # Establish the environment-specific DB connection, then load modules.
    Connection(env=options.config)
    LoadModule()

    APP = Application()

    logging.info("Starting API Server...")
    logging.info("Listening on port: %s" % options.port)

    # xheaders=True: trust X-Real-Ip/X-Forwarded-For from a fronting proxy.
    SERVER = HTTPServer(APP, xheaders=True)
    SERVER.bind(int(options.port))
Esempio n. 31
0
def main():
  """ Starts the AdminServer.

  Also serves as the CLI entry point: the 'summary' and 'restart'
  subcommands run and exit without starting the server; 'serve' falls
  through to the long-running setup below (blocks in the IOLoop).
  """
  logging.basicConfig(format=LOG_FORMAT, level=logging.INFO)

  parser = argparse.ArgumentParser(
    prog='appscale-admin', description='Manages AppScale-related processes')
  subparsers = parser.add_subparsers(dest='command')
  subparsers.required = True

  serve_parser = subparsers.add_parser(
    'serve', description='Starts the server that manages AppScale processes')
  serve_parser.add_argument(
    '-p', '--port', type=int, default=constants.DEFAULT_PORT,
    help='The port to listen on')
  serve_parser.add_argument(
    '-v', '--verbose', action='store_true', help='Output debug-level logging')

  subparsers.add_parser(
    'summary', description='Lists AppScale processes running on this machine')
  restart_parser = subparsers.add_parser(
    'restart',
    description='Restart AppScale processes running on this machine')
  restart_parser.add_argument('service', nargs='+',
                              help='The process or service ID to restart')

  args = parser.parse_args()
  # 'summary': print the process table and exit immediately.
  if args.command == 'summary':
    table = sorted(list(get_combined_services().items()))
    print(tabulate(table, headers=['Service', 'State']))
    sys.exit(0)

  # 'restart': forward the request to a running AdminServer over its
  # unix-domain management socket, then exit.
  if args.command == 'restart':
    socket_path = urlquote(ServiceManagerHandler.SOCKET_PATH, safe='')
    session = requests_unixsocket.Session()
    response = session.post(
      'http+unix://{}/'.format(socket_path),
      data={'command': 'restart', 'arg': [args.service]})
    response.raise_for_status()
    return

  if args.verbose:
    logging.getLogger('appscale').setLevel(logging.DEBUG)

  # Expose deployment facts as global tornado options for handlers below.
  options.define('secret', appscale_info.get_secret())
  options.define('login_ip', appscale_info.get_login_ip())
  options.define('private_ip', appscale_info.get_private_ip())
  options.define('zk_locations', appscale_info.get_zk_node_ips())
  options.define('load_balancers', appscale_info.get_load_balancer_ips())

  acc = appscale_info.get_appcontroller_client()
  ua_client = UAClient(appscale_info.get_db_master_ip(), options.secret)
  zk_client = KazooClient(
    hosts=','.join(options.zk_locations),
    connection_retry=ZK_PERSISTENT_RECONNECTS)
  zk_client.start()
  version_update_lock = zk_client.Lock(constants.VERSION_UPDATE_LOCK_NODE)
  thread_pool = ThreadPoolExecutor(4)
  monit_operator = MonitOperator()
  # Shared handler kwargs passed to most routes below.
  all_resources = {
    'acc': acc,
    'ua_client': ua_client,
    'zk_client': zk_client,
    'version_update_lock': version_update_lock,
    'thread_pool': thread_pool
  }

  # Role-dependent background managers: only run on the relevant nodes.
  if options.private_ip in appscale_info.get_taskqueue_nodes():
    logger.info('Starting push worker manager')
    GlobalPushWorkerManager(zk_client, monit_operator)

  if options.private_ip in appscale_info.get_load_balancer_ips():
    logger.info('Starting RoutingManager')
    routing_manager = RoutingManager(zk_client)
    routing_manager.start()

  service_manager = ServiceManager(zk_client)
  service_manager.start()

  controller_state = ControllerState(zk_client)

  # Public admin API (HTTP on args.port).
  app = web.Application([
    ('/oauth/token', OAuthHandler, {'ua_client': ua_client}),
    ('/v1/apps/([^/]*)/services/([^/]*)/versions', VersionsHandler,
     {'acc': acc, 'ua_client': ua_client, 'zk_client': zk_client,
      'version_update_lock': version_update_lock, 'thread_pool': thread_pool,
      'controller_state': controller_state}),
    ('/v1/projects', ProjectsHandler, all_resources),
    ('/v1/projects/([a-z0-9-]+)', ProjectHandler, all_resources),
    ('/v1/apps/([^/]*)/services', ServicesHandler,
     {'ua_client': ua_client, 'zk_client': zk_client}),
    ('/v1/apps/([^/]*)/services/([^/]*)', ServiceHandler,
     all_resources),
    ('/v1/apps/([^/]*)/services/([^/]*)/versions/([^/]*)',
     VersionHandler,
     {'acc': acc, 'ua_client': ua_client, 'zk_client': zk_client,
      'version_update_lock': version_update_lock, 'thread_pool': thread_pool,
      'controller_state': controller_state}),
    ('/v1/apps/([^/]*)/operations/([a-z0-9-]+)', OperationsHandler,
     {'ua_client': ua_client}),
    ('/api/cron/update', UpdateCronHandler,
     {'acc': acc, 'zk_client': zk_client, 'ua_client': ua_client}),
    ('/api/datastore/index/add', UpdateIndexesHandler,
     {'zk_client': zk_client, 'ua_client': ua_client}),
    ('/api/queue/update', UpdateQueuesHandler,
     {'zk_client': zk_client, 'ua_client': ua_client})
  ])
  logger.info('Starting AdminServer')
  app.listen(args.port)

  # Private management API served on a unix-domain socket (used by the
  # 'restart' subcommand above).
  management_app = web.Application([
    ('/', ServiceManagerHandler, {'service_manager': service_manager})])
  management_server = HTTPServer(management_app)
  management_socket = bind_unix_socket(ServiceManagerHandler.SOCKET_PATH)
  management_server.add_socket(management_socket)

  # Blocks here until the process is stopped.
  io_loop = IOLoop.current()
  io_loop.start()
Esempio n. 32
0
import motor

from os import path
from tornado.ioloop import IOLoop
from tornado.options import options
from requesthandlers.api import ApiHandler
from requesthandlers.api.player import PlayerHandler
from requesthandlers.api.user import UserHandler
from requesthandlers.api.tournament import TournamentHandler
from requesthandlers.api.section import SectionHandler
from requesthandlers.api.session import SessionHandler
from requesthandlers.api.lookups import LookupsHandler
from requesthandlers import IndexPageHandler, VerifyPageHandler
from app import CustomApp

# HTTP listen port for the server (override with --port).
options.define('port', default=8888, help='run on the given port', type=int)

def load_app_settings():
    """Fetch the single settings document from the ``chessrank`` Mongo DB."""
    client = pymongo.MongoClient()
    return client.chessrank.settings.find_one()

def main():
    server_path   = path.dirname(__file__)
    template_path = path.join(server_path, 'templates')
    static_path   = path.normpath(path.join(server_path, '..', 'client'))
    
    settings = {
                  'static_path': static_path,
                'template_path': template_path,
                 'xsrf_cookies': False, # TODO: Enable
                    'login_url': '/',
Esempio n. 33
0
# -*- coding: utf-8 -*-

import tornado.ioloop
import tornado.web
from tornado.options import options
from logbook import RotatingFileHandler, info

from . import handler
from . import applicationconfig


# When set (--proxy), requests are forwarded through a proxy environment.
options.define("proxy", type=bool, default=False, help="if proxy environment, indicate this option.")


def get_application():
    """Build the Tornado app: every path is routed to ``handler.Proxy``."""
    routes = [
        (r"/(?P<service>.+)", handler.Proxy, {"needProxy": options.proxy}),
    ]
    return tornado.web.Application(routes)


def log_setup():
    """Install a size-rotating logbook handler (100 MiB, 5 backups)."""
    file_handler = RotatingFileHandler(
        applicationconfig.LOG_PATH,
        max_size=104857600,
        backup_count=5,
    )
    file_handler.push_application()


def start():
    """Parse CLI options, set up logging, and run the proxy server.

    Blocks inside the IOLoop until the process is stopped.
    """
    tornado.options.parse_command_line()

    application = get_application()
    application.listen(applicationconfig.PORT)
    log_setup()
    # Log the startup message *before* entering the IOLoop: IOLoop.start()
    # blocks until shutdown, so the original placement only emitted this
    # line (if ever) while the server was already stopping.
    info("Proxy run on port:{0}", applicationconfig.PORT)
    tornado.ioloop.IOLoop.current().start()
Esempio n. 34
0
def bootstrap(config_file=None):
    """Declare all application options and load their values.

    Parsing happens in three steps: command line first (so --config is
    available), then the config file, then the command line again so
    explicit CLI flags override file values.

    Args:
        config_file: optional path overriding ``tinyurld.default_config``.
    """
    options.define('config', config_file or tinyurld.default_config, type=str, help='Config file path')
    options.define('host', '0.0.0.0', type=str, help='Ip address for bind')
    options.define('port', 8888, type=int, help='application port')
    options.define('autoreload', False, type=bool,
                   help='Autoreload application after change files', group='application')
    options.define('debug', False, type=bool, help='Debug mode', group='application')
    options.define('mongo_host', type=str, help='MongoDB host IP', group='mongodb')
    options.define('mongo_port', 27017, type=int, help='MongoDB port', group='mongodb')
    options.define('mongo_user', None, type=str, help='MongoDB user', group='mongodb')
    options.define('mongo_password', None, type=str, help='MongoDB user password', group='mongodb')
    # First parse: makes --config usable before reading the file.
    options.parse_command_line()

    options.parse_config_file(options.config)

    # override options from config file with command line args
    options.parse_command_line()
    tornado.log.app_log.info('Read config: {}'.format(options.config))
Esempio n. 35
0
import uvloop
from tornado.platform.asyncio import AsyncIOMainLoop

# Use uvloop as the asyncio event loop and bridge Tornado onto asyncio.
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
AsyncIOMainLoop().install()

import aiohttp
import os
import tornado.httpserver
from tornado.httpclient import AsyncHTTPClient
import tornado.ioloop
import tornado.web
import ujson as json
from tornado.options import options

options.define('port', default=8080, type=int, help="Server port")
# Address of the downstream "go sleep" service; overridable via environment.
GO_SLEEP_ADDRESS = os.getenv('GO_SLEEP_ADDRESS', '127.0.0.1:8090')

# Shared TCP connector: caches DNS for 5 minutes, caps total connections
# at 10k, and keeps idle connections alive for 30 seconds.
_connector = aiohttp.TCPConnector(ttl_dns_cache=300,
                                  limit=10000,
                                  keepalive_timeout=30)


class JsonHandler(tornado.web.RequestHandler):
    """Request-handler base class whose responses are always JSON."""

    def set_default_headers(self):
        # Every response from this handler family is JSON.
        self.set_header("Content-Type", "application/json")

    def write_response(self, data):
        """Serialize ``data`` to JSON and write it to the response body."""
        payload = json.dumps(data)
        self.write(payload)

Esempio n. 36
0
def main():
    """ Starts a web service for handing datastore requests.

    Selects a Cassandra- or FoundationDB-backed datastore from --type,
    registers this server in ZooKeeper, and blocks in the IOLoop.
    """

    global datastore_access
    global server_node
    global zk_client
    zookeeper_locations = appscale_info.get_zk_locations_string()
    if not zookeeper_locations:
        zookeeper_locations = 'localhost:2181'

    parser = argparse.ArgumentParser()
    parser.add_argument('-t',
                        '--type',
                        choices=dbconstants.VALID_DATASTORES,
                        default=dbconstants.VALID_DATASTORES[0],
                        help='Database type')
    parser.add_argument('--fdb-clusterfile',
                        default=None,
                        help='Location of FoundationDB clusterfile')
    parser.add_argument('-p',
                        '--port',
                        type=int,
                        default=dbconstants.DEFAULT_PORT,
                        help='Datastore server port')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Output debug-level logging')
    args = parser.parse_args()

    if args.verbose:
        logging.getLogger('appscale').setLevel(logging.DEBUG)

    options.define('private_ip', appscale_info.get_private_ip())
    options.define('port', args.port)
    taskqueue_locations = get_load_balancer_ips()

    # ZK path identifying this server instance (registered below).
    server_node = '{}/{}:{}'.format(DATASTORE_SERVERS_NODE, options.private_ip,
                                    options.port)

    retry_policy = KazooRetry(max_tries=5)
    zk_client = kazoo.client.KazooClient(
        hosts=zookeeper_locations,
        connection_retry=ZK_PERSISTENT_RECONNECTS,
        command_retry=retry_policy)
    zk_client.start()

    # Backend selection: 'cassandra' uses the classic distributed datastore;
    # anything else falls through to FoundationDB.
    if args.type == 'cassandra':
        datastore_batch = DatastoreFactory.getDatastore(
            args.type, log_level=logger.getEffectiveLevel())
        zookeeper = zktransaction.ZKTransaction(
            zk_client=zk_client,
            db_access=datastore_batch,
            log_level=logger.getEffectiveLevel())
        transaction_manager = TransactionManager(zk_client)
        datastore_access = DatastoreDistributed(
            datastore_batch,
            transaction_manager,
            zookeeper=zookeeper,
            log_level=logger.getEffectiveLevel(),
            taskqueue_locations=taskqueue_locations)
    else:
        from appscale.datastore.fdb.fdb_datastore import FDBDatastore
        clusterfile_path = args.fdb_clusterfile
        if not clusterfile_path:
            # No clusterfile on the command line: try to materialize one
            # from ZooKeeper; otherwise let the FDB client use defaults.
            try:
                clusterfile_content = zk_client.get(FDB_CLUSTERFILE_NODE)[0]
                clusterfile_path = '/run/appscale/appscale-datastore-fdb.cluster'
                with open(clusterfile_path, 'w') as clusterfile:
                    clusterfile.write(clusterfile_content)
            except NoNodeError:
                logger.warning(
                    'Neither --fdb-clusterfile was specified nor {} ZK node exists,'
                    'FDB client will try to find clusterfile in one of default locations'
                    .format(FDB_CLUSTERFILE_NODE))
        datastore_access = FDBDatastore()
        datastore_access.start(clusterfile_path)

    zk_client.add_listener(zk_state_listener)
    zk_client.ensure_path(DATASTORE_SERVERS_NODE)
    # Since the client was started before adding the listener, make sure the
    # server node gets created.
    zk_state_listener(zk_client.state)
    zk_client.ChildrenWatch(DATASTORE_SERVERS_NODE, update_servers_watch)

    index_manager = IndexManager(zk_client,
                                 datastore_access,
                                 perform_admin=True)
    if args.type == 'cassandra':
        datastore_access.index_manager = index_manager
    else:
        datastore_access.index_manager.composite_index_manager = index_manager

    server = tornado.httpserver.HTTPServer(pb_application)
    server.listen(args.port)

    # Blocks until the process is stopped.
    IOLoop.current().start()
Esempio n. 37
0
def open_run_save(api, path):
    """open a notebook, run it, and save.

    Only the original notebook is saved, the output is not recorded.

    Args:
        api: notebook-server API client (get_notebook/new_kernel/... methods).
        path: notebook path relative to the server's /notebooks/ root.
    """
    nb = api.get_notebook(path)
    session = Session()
    # Generator-style coroutine: yields are driven by the IOLoop
    # (e.g. via loop.run_sync at the call site).
    kernel = yield api.new_kernel(session.session)
    try:
        yield run_notebook(nb, kernel, session)
    finally:
        # Always tear the kernel down, even if execution failed.
        api.kill_kernel(kernel['id'])
    gen_log.info("Saving %s/notebooks/%s", api.url, path)
    api.save_notebook(nb, path)


if __name__ == '__main__':
    enable_pretty_logging()
    options.define("url",
                   default="http://localhost:8888",
                   help="The base URL of the notebook server to test")
    # Remaining positional args are notebook paths; default to one notebook.
    args = options.parse_command_line()
    paths = args or ['Untitled0.ipynb']

    api = NBAPI(url=options.url.rstrip('/'))
    loop = IOLoop.current()

    for path in paths:
        gen_log.info("Running %s/notebooks/%s", api.url, path)
        # run_sync executes the lambda immediately each iteration, so the
        # late binding of `path` in the closure is harmless here.
        loop.run_sync(lambda: open_run_save(api, path))
Esempio n. 38
0
from tornado import ioloop
from tornado import gen
from tornado import iostream
from tornado import web
from tornado.options import define, options
from tornado.log import *
from tornado.concurrent import Future
from tornado.queues import Queue
from tornado.locks import Semaphore, Condition

import config as server_config
from handlers import *
from cache import *
from coroutine_msgbus import *

# Command-line flag: silence tornado's per-request access log when True.
options.define('suppress_access_log', default=False, help='whether to suppress the access_log of tornado.log')

# AES cipher block size (bytes); payloads must be padded to a multiple of it.
BS = AES.block_size


def pad(s):
    """Zero-pad ``s`` up to the next multiple of the AES block size.

    Strings already on a block boundary are returned unchanged.
    NOTE(review): NUL padding is ambiguous for payloads that legitimately
    end in NULs (unpad() strips those too); kept for wire compatibility.
    """
    # PEP 8 (E731): named callables should be def, not lambda assignments.
    remainder = len(s) % BS
    if remainder == 0:
        return s
    return s + (BS - remainder) * chr(0)


def unpad(s):
    """Strip the trailing NUL padding added by pad()."""
    return s.rstrip(chr(0))


# Connection/heartbeat tuning constants (seconds unless noted).
PENDING_REQ_CNT = 10                      # max queued pending requests
HEARTBEAT_PERIOD_SEC = 60                 # expected heartbeat interval
HEARTBEAT_FAST_PING_DELAY_SEC = 3
HEARTBEAT_NEGATIVE_CHECK_DELAY_SEC = 10
STATISTICS_PERIOD_SEC = 120

class DeviceConnection(object):

    state_waiters = {}
    state_happened = {}
Esempio n. 39
0
#-*- coding:utf-8 -*-
from tornado import web, websocket, ioloop
import json
from tornado.options import options

options.define("port", default=8888, type=int)

# Registry of currently connected websocket clients (used for broadcast).
cl = []

class IndexHandler(web.RequestHandler):
    """Serves the chat page."""

    def get(self):
        # Render the client-side chat UI.
        self.render("index.html")

class SocketHandler(websocket.WebSocketHandler):
    """Chat socket: relays each incoming message to all other clients."""

    def open(self):
        # Register this connection in the module-level client list once.
        if self not in cl:
            cl.append(self)

    def on_close(self):
        # Drop the connection from the broadcast list, if present.
        if self in cl:
            cl.remove(self)

    def on_message(self, message):
        print("Client(%s) said: %s" % (cl.index(self), message))
        # Broadcast to every connected client except the sender.
        for peer in cl:
            if peer != self:
                peer.write_message(message)

app = web.Application([
Esempio n. 40
0
################################
if __name__ == "__main__":
    file_io.set_logging_format()
    logging.getLogger().setLevel(logging.INFO)

    zk_ips = appscale_info.get_zk_node_ips()
    zk_client = KazooClient(hosts=','.join(zk_ips))
    zk_client.start()

    deployment_config = DeploymentConfig(zk_client)
    projects_manager = GlobalProjectsManager(zk_client)
    thread_pool = ThreadPoolExecutor(MAX_BACKGROUND_WORKERS)
    source_manager = SourceManager(zk_client, thread_pool)
    source_manager.configure_automatic_fetch(projects_manager)

    options.define('private_ip', appscale_info.get_private_ip())
    options.define('syslog_server', appscale_info.get_headnode_ip())
    options.define('db_proxy', appscale_info.get_db_proxy())
    options.define('tq_proxy', appscale_info.get_tq_proxy())
    options.define('secret', appscale_info.get_secret())

    running_instances = recover_state(zk_client)
    PeriodicCallback(stop_failed_instances,
                     INSTANCE_CLEANUP_INTERVAL * 1000).start()

    app = tornado.web.Application([('/versions/([a-z0-9-_]+)', VersionHandler),
                                   ('/versions/([a-z0-9-_]+)/([0-9-]+)',
                                    InstanceHandler)])

    app.listen(constants.APP_MANAGER_PORT)
    logging.info('Starting AppManager on {}'.format(
import tornado.ioloop
import tornado.web
import tornado.httpserver

from random import randint
from tornado import gen
from tornado.options import options
from commons import (
    JsonHandler,
    JsonHelloWorldHandler,
    PlaintextHelloWorldHandler,
    HtmlHandler,
)


# Benchmark-server configuration flags.
options.define("port", default=8888, type=int, help="Server port")
options.define("mongo", default="localhost", type=str, help="MongoDB host")
options.define("backlog", default=8192, type=int, help="Server backlog")


class SingleQueryHandler(JsonHandler):
    """Returns one random ``world`` row as JSON (single-query benchmark)."""

    @gen.coroutine
    def get(self):
        # Fetch a random row with id in [1, 10000] from Mongo.
        row = yield db.world.find_one(randint(1, 10000))
        payload = {
            self.ID: int(row["_id"]),
            self.RANDOM_NUMBER: int(row[self.RANDOM_NUMBER]),
        }
        self.finish(json.dumps(payload))
Esempio n. 42
0
"""
Update meta guide field for existing docs prior to addition of such field.
"""

import logging

from tornado.options import options, parse_command_line

from discovery.model.dataset import DatasetMetadata

# Guide-path constants keyed by dataset source/portal.
options.define('cd2h', default='/guide')
options.define('niaid', default='/guide/niaid')
options.define('outbreak', default='/guide/outbreak/dataset')
options.define('n3c', default='/guide/n3c/dataset')

options.define('test', default='/TEST')  # placeholder/testing guide path


def updateDocs():
    '''
        Update meta guide field
    '''
    docs = DatasetMetadata.search(private=False)
    for doc in docs.scan():
        has_guide = getattr(getattr(doc, "_meta", None), 'guide', None)
        if not has_guide:
            print(doc)
            if doc['@type'] == "outbreak:Dataset":
                res = doc.update(**{'_meta': {'guide': options.outbreak}})
                logging.info(f'[Outbreak] Updating guide field for doc {doc}')
            elif doc['@type'] == "niaid:NiaidDataset":
Esempio n. 43
0
# -*- coding: utf-8 -*-

import tornado.ioloop
import tornado.web
from tornado.options import options
from logbook import RotatingFileHandler, info

from . import handler
from . import applicationconfig

# When set (--proxy), requests are forwarded through a proxy environment.
options.define('proxy',
               type=bool,
               default=False,
               help="if proxy environment, indicate this option.")


def get_application():
    return tornado.web.Application([
        (r"/(?P<service>.+)", handler.Proxy, dict(needProxy=options.proxy)),
    ])


def log_setup():
    log = RotatingFileHandler(applicationconfig.LOG_PATH,
                              max_size=104857600,
                              backup_count=5)
    log.push_application()


def start():
    tornado.options.parse_command_line()
Esempio n. 44
0
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import os, subprocess, base64, tempfile, shutil, sys, uuid, logging, time
import tornado.ioloop
import tornado.web
from tornado.options import define, options, parse_command_line

# Server flags: port/branch/base control what is served; enable_upload and
# access gate mutating endpoints; cookie_secret signs session cookies.
options.define("port", default=8888, type=int)
options.define("branch", default="HEAD")
options.define("base", default=".")
options.define("enable_upload", default=None, multiple=True, type=str)
options.define("access", type=str, multiple=True)
options.define("index_path", type=str, default="index.html")
# NOTE(review): the default cookie_secret is a public constant - override it
# in any real deployment.
options.define("cookie_secret", type=str, default="barebones.js")

# In-memory log of (level, timestamp, ctx, message) tuples.
log = []


def now():
    """Return the current UTC time as a ``YYYY-MM-DD HH:MM:SS`` string."""
    utc = time.gmtime()
    return time.strftime("%Y-%m-%d %H:%M:%S", utc)


def _add_to_log(level, ctx, fmt, *args, **kwargs):
Esempio n. 45
0
def main():
    """ Starts a web service for handing datastore requests.

    Builds the Cassandra-backed distributed datastore, registers this
    server in ZooKeeper, and blocks in the IOLoop.
    """

    global datastore_access
    global server_node
    global zookeeper
    zookeeper_locations = appscale_info.get_zk_locations_string()

    parser = argparse.ArgumentParser()
    parser.add_argument('-t',
                        '--type',
                        choices=dbconstants.VALID_DATASTORES,
                        default=dbconstants.VALID_DATASTORES[0],
                        help='Database type')
    parser.add_argument('-p',
                        '--port',
                        type=int,
                        default=dbconstants.DEFAULT_PORT,
                        help='Datastore server port')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Output debug-level logging')
    args = parser.parse_args()

    if args.verbose:
        logging.getLogger('appscale').setLevel(logging.DEBUG)

    options.define('private_ip', appscale_info.get_private_ip())
    options.define('port', args.port)
    taskqueue_locations = get_load_balancer_ips()

    # ZK path identifying this server instance (registered below).
    server_node = '{}/{}:{}'.format(DATASTORE_SERVERS_NODE, options.private_ip,
                                    options.port)

    datastore_batch = DatastoreFactory.getDatastore(
        args.type, log_level=logger.getEffectiveLevel())
    zookeeper = zktransaction.ZKTransaction(
        host=zookeeper_locations,
        db_access=datastore_batch,
        log_level=logger.getEffectiveLevel())

    zookeeper.handle.add_listener(zk_state_listener)
    zookeeper.handle.ensure_path(DATASTORE_SERVERS_NODE)
    # Since the client was started before adding the listener, make sure the
    # server node gets created.
    zk_state_listener(zookeeper.handle.state)
    zookeeper.handle.ChildrenWatch(DATASTORE_SERVERS_NODE,
                                   update_servers_watch)

    transaction_manager = TransactionManager(zookeeper.handle)
    datastore_access = DatastoreDistributed(
        datastore_batch,
        transaction_manager,
        zookeeper=zookeeper,
        log_level=logger.getEffectiveLevel(),
        taskqueue_locations=taskqueue_locations)
    index_manager = IndexManager(zookeeper.handle,
                                 datastore_access,
                                 perform_admin=True)
    datastore_access.index_manager = index_manager

    server = tornado.httpserver.HTTPServer(pb_application)
    server.listen(args.port)

    # Blocks until the process is stopped.
    IOLoop.current().start()
Esempio n. 46
0
import sys

from functools import partial
from tornado import escape, gen
from tornado.options import options
from tornado.template import Template
from urllib import urlencode
from viewfinder.backend.base import main, util
from viewfinder.backend.db.db_client import DBClient, DBKey
from viewfinder.backend.db.settings import AccountSettings
from viewfinder.backend.db.user import User
from viewfinder.backend.services.email_mgr import EmailManager, SendGridEmailManager
from viewfinder.backend.services.sms_mgr import SMSManager, TwilioSMSManager

# Flags controlling the notification blast: which templates to use and which
# user-id range / user preferences gate delivery.
options.define("email_template", default=None, type=str, help="name of the .email template file to use")
options.define("email_subject", default="New Viewfinder Features", type=str, help="subject to relay with email message")
options.define("sms_template", default=None, type=str, help="name of the .sms template file to use")
options.define(
    "min_user_id", default=11, type=int, help="only users with ids >= this id will be sent email/SMS (-1 for no min)"
)
options.define(
    "max_user_id", default=11, type=int, help="only users with ids <= this id will be sent email/SMS (-1 for no max)"
)
options.define(
    "honor_allow_marketing",
    default=True,
    type=bool,
    help="do not send the email/SMS if the user has turned off marketing emails",
)
options.define(
Esempio n. 47
0
                (uid, user_name, lat, lng))
        db_conn.commit()
    user_name_to_uid = {}
    for uid, user_name in db_cursor.execute(
            "SELECT uid, user_name FROM users").fetchall():
        user_name_to_uid[user_name.lower()] = uid
    positions, now = {}, int(time.time())
    for uid, user_name, lat, lng in db_cursor.execute(
            "SELECT uid, user_name, lat, lng FROM users WHERE lat IS NOT NULL AND lng IS NOT NULL"
    ).fetchall():
        positions[uid] = [lat, lng, now, user_name]


# Initialize (or create) the SQLite database backing the map.
db_init("ld_map.db")

options.define("port", default=28285, type=int)
# NOTE(review): constant cookie secret - fine for a jam tool, not for prod.
options.define("cookie_secret", type=str, default="ld_map")

log = []  # in-memory log of (level, timestamp, ctx, message) tuples


def now():
    """Return the current UTC time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    utc = time.gmtime()
    return time.strftime("%Y-%m-%d %H:%M:%S", utc)


def _add_to_log(level, ctx, fmt, *args, **kwargs):
    """Append a rendered entry to the in-memory log, then emit it via logging."""
    rendered = fmt % args
    log.append((level, now(), ctx, rendered))
    # kwargs (e.g. exc_info) are only meaningful to the logging module.
    logging.log(level, fmt, *args, **kwargs)


class BaseHandler(tornado.web.RequestHandler):
Esempio n. 48
0
import tornado.web
from tornado.ioloop import IOLoop
from tornado import httpserver, gen
from tornado.options import define, options
import tornado.escape
import datetime
from motor import MotorClient
from pymongo import ASCENDING,DESCENDING
from bson.objectid import ObjectId
from parse import parse_message
from tcp_server import TCPSocketHandler

# A capped collection is a collection with a maximum number of elements;
# it lets us use "tailable" cursors, which in turn lets us implement
# long polling.
options.define("database", default="noobe", help="Base de datos de mongoDB")
options.define("capped-coll", default="cappedData", help="'Capped colection' a utilizar")
options.define("store-coll", default="data", help="Coleccion donde almacenar la data")

options.define("port", default=8080, help="run HTTP server on the given port", type=int)
options.define("tcp-port", default=8989, help="run TCP server on the given port", type=int)

# NOTE(review): options["database"] is read at import time, before any
# parse_command_line()/parse_config_file() can override it, so this always
# connects to the default database name — confirm that is intended.
_db_object = MotorClient()[options["database"]]

@gen.coroutine
def insert_data(data_dict, db=_db_object,
                store_coll=options["store-coll"],
                capped_coll=options["capped-coll"]):
    """Inserta un objeto en la base de datos"""
    item = data_dict.copy()
    item["_id"] = ObjectId()
Esempio n. 49
0
    
    Only the original notebook is saved, the output is not recorded.
    """
    nb = api.get_notebook(path)
    session = Session()
    kernel = yield api.new_kernel(session.session, legacy=legacy)
    try:
        yield run_notebook(nb, kernel, session)
    finally:
        api.kill_kernel(kernel['id'])
    gen_log.info("Saving %s/notebooks/%s", api.url, path)
    api.save_notebook(nb, path)

if __name__ == '__main__':
    enable_pretty_logging()
    options.define("url",
                   default="http://127.0.0.1:8888",
                   help="The base URL of the notebook server to test")
    options.define("legacy",
                   default=False,
                   type=bool,
                   help="Use legacy (2.x) websocket handshake")

    remaining = options.parse_command_line()
    # Fall back to a single default notebook when none are given.
    paths = remaining if remaining else ['Untitled0.ipynb']

    api = NBAPI(url=options.url.rstrip('/'))
    loop = IOLoop.current()

    for path in paths:
        gen_log.info("Running %s/notebooks/%s", api.url, path)
        # run_sync completes inside the loop body, so binding the current
        # path as a default argument is purely defensive.
        loop.run_sync(lambda path=path: open_run_save(api, path, options.legacy))
Esempio n. 50
0
def main():
    """Parse CLI flags, configure this node's role, and start Hermes.

    Master nodes additionally poll the portal for tasks, watch profiling
    configs in ZooKeeper and deploy the sensor app; every node serves the
    stats routes. This function blocks in the IOLoop until the process
    is stopped.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Output debug-level logging')
    args = parser.parse_args()

    logging.basicConfig(format=LOG_FORMAT, level=logging.INFO)
    if args.verbose:
        logging.getLogger().setLevel(logging.DEBUG)

    options.define('secret', appscale_info.get_secret())

    # Shut down cleanly on both service stop (SIGTERM) and Ctrl-C (SIGINT).
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler)

    my_ip = appscale_info.get_private_ip()
    is_master = (my_ip == appscale_info.get_headnode_ip())
    is_lb = (my_ip in appscale_info.get_load_balancer_ips())

    if is_master:
        # Periodically check with the portal for new tasks.
        # Note: Currently, any active handlers from the tornado app will block
        # polling until they complete.
        PeriodicCallback(poll, constants.POLLING_INTERVAL).start()

        # Only the master Hermes node handles the /do_task route.
        task_route = ('/do_task', TaskHandler)

        global zk_client
        zk_client = KazooClient(hosts=','.join(
            appscale_info.get_zk_node_ips()),
                                connection_retry=ZK_PERSISTENT_RECONNECTS)
        zk_client.start()
        # Start watching profiling configs in ZooKeeper.
        stats_app.ProfilingManager(zk_client)

        # Periodically checks if the deployment is registered and uploads the
        # appscalesensor app for registered deployments.
        sensor_deployer = SensorDeployer(zk_client)
        PeriodicCallback(sensor_deployer.deploy,
                         constants.UPLOAD_SENSOR_INTERVAL).start()
    else:
        task_route = (
            '/do_task', Respond404Handler,
            dict(reason='Hermes slaves do not manage tasks from Portal'))

    app = tornado.web.Application(
        [
            ("/", MainHandler),
            task_route,
        ] + stats_app.get_local_stats_api_routes(is_lb) +
        stats_app.get_cluster_stats_api_routes(is_master),
        debug=False)
    app.listen(constants.HERMES_PORT)

    # Bug fix: this message used to be logged *after* IOLoop.start(), which
    # blocks until shutdown — so the startup message was never emitted
    # while the server was actually serving.
    logging.info("Hermes is up and listening on port: {}.".format(
        constants.HERMES_PORT))

    # Start loop for accepting http requests (blocks until stopped).
    IOLoop.instance().start()
Esempio n. 51
0
#!/usr/bin/env python

import json
import motor
import tornado.ioloop
import tornado.web
import tornado.httpserver

from random import randint
from tornado import gen
from tornado.options import options
from commons import JsonHandler, JsonHelloWorldHandler, PlaintextHelloWorldHandler, HtmlHandler

# HTTP/MongoDB benchmark configuration flags.
options.define('port', default=8888, type=int, help="Server port")
options.define('mongo', default='localhost', type=str, help="MongoDB host")
options.define('backlog', default=8192, type=int, help="Server backlog")


class SingleQueryHandler(JsonHandler):
    """Fetch one random World row from MongoDB and return it as JSON."""

    @gen.coroutine
    def get(self):
        row = yield db.world.find_one(randint(1, 10000))
        payload = {self.ID: int(row['_id']),
                   self.RANDOM_NUMBER: int(row[self.RANDOM_NUMBER])}
        self.finish(json.dumps(payload))

Esempio n. 52
0
# Application option definitions; values are populated from the config
# file parsed below.
define("database_uri",
       default="sqlite:////tmp/apoptosis.db",
       help="Database URI")

define("http_port", default=5000, help="HTTP Port")

define("tornado_secret", help="Tornado Secret")
define("tornado_translations", help="Tornado translations path")
define("tornado_templates", help="Tornado templates path")
define("tornado_static", help="Tornado static path")

define("evesso_clientid", help="EVE SSO client ID")
define("evesso_secretkey", help="EVE SSO secret key")
define("evesso_callback", help="EVE SSO callback URI")

options.define("slack_apitoken", help="Slack API Token")
options.define("slack_username", help="Slack username", default="apoptosis")

# Load values for the options defined above from the on-disk configuration.
parse_config_file("/home/user/apoptosis.conf")  # XXX correct location

# NOTE(review): auth_name and the redis_* options are read below but not
# defined in this file — presumably defined in another module; verify,
# otherwise these attribute lookups raise at import time.
auth_name = options.auth_name

redis_host = options.redis_host
redis_port = options.redis_port
redis_database = options.redis_database
redis_password = options.redis_password

# Module-level aliases so other modules can import plain names instead of
# going through tornado's options object.
database_uri = options.database_uri

http_port = options.http_port
Esempio n. 53
0
    Assume elasticsearch related setups exist on the target server.

    python -m scripts.outbreak --help

"""

import logging
from datetime import datetime
import requests
from elasticsearch import Elasticsearch, RequestError
from tornado.options import options, parse_command_line

from discovery.model.dataset import Dataset

# Elasticsearch host receiving the dataset index (overridable on the CLI).
options.define('target_host', default='api.outbreak.info')

# Canonical ES mapping for outbreak.info resource documents.
MAPPING_URL = 'https://raw.githubusercontent.com/SuLab/outbreak.info-resources/master/outbreak_resources_es_mapping.json'
# New indices are named INDEX_PREFIX + <timestamp>; NOTE(review): the alias
# is presumably re-pointed at the newest index — confirm in main().
INDEX_PREFIX = 'outbreak-dataset-'
INDEX_ALIAS = 'outbreak-resources-dataset'


def main():

    parse_command_line()
    client = Elasticsearch(options.target_host)

    # create index
    datestring = ''.join(str(item) for item in datetime.now().timetuple()[:-1])
    index_name = INDEX_PREFIX + datestring
    _ = client.indices.create(index_name, {
Esempio n. 54
0
from appscale.admin.instance_manager import (
  instance_manager as instance_manager_module)
from appscale.admin.instance_manager import InstanceManager
from appscale.admin.instance_manager import instance
from appscale.admin.instance_manager import utils
from appscale.common import (
  file_io,
  appscale_info,
  misc,
  monit_interface,
  testing
)
from appscale.common import monit_app_configuration
from appscale.common.monit_interface import MonitOperator

# Stub option values consumed by the instance-manager tests below.
options.define('login_ip', '127.0.0.1')
options.define('syslog_server', '127.0.0.1')
# Guard against redefining 'private_ip' when another test module has
# already defined it on the shared global options object.
if not hasattr(options, 'private_ip'):
  options.define('private_ip', '<private_ip>')

options.define('db_proxy', '<private_ip>')
options.define('load_balancer_ip', '<private_ip>')
options.define('tq_proxy', '<private_ip>')


class TestInstanceManager(AsyncTestCase):
  @gen_test
  def test_start_app_goodconfig_python(self):
    testing.disable_logging()

    version_details = {'runtime': 'python27',
Esempio n. 55
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# auth: wallace.wang

from tornado import web
from tornado import ioloop

from tornado import httpserver
from tornado.options import options
from core.serverHandlers import AuthHandler, ServerHandler

options.define('port', default=8005, type=int, help='run help')

# URL routes handled by this small auth/operate service.
routes = [
    (r'/operate', ServerHandler),
    (r'/auth', AuthHandler),
]
application = web.Application(handlers=routes, autoreload=True)

if __name__ == '__main__':
    options.parse_command_line()
    server = httpserver.HTTPServer(application)
    server.listen(options.port)
    ioloop.IOLoop.instance().start()
import json
import motor
import tornado.ioloop
import tornado.web
import tornado.httpserver

from random import randint
from tornado.options import options
from commons import JsonHandler, JsonHelloWorldHandler, PlaintextHelloWorldHandler, HtmlHandler
from tornado.ioloop import IOLoop

# Run tornado on top of the asyncio event loop implementation.
IOLoop.configure('tornado.platform.asyncio.AsyncIOLoop')


# HTTP/MongoDB benchmark configuration flags.
options.define('port', default=8888, type=int, help="Server port")
options.define('mongo', default='localhost', type=str, help="MongoDB host")
options.define('backlog', default=8192, type=int, help="Server backlog")


class SingleQueryHandler(JsonHandler):
    """Serve a single random World row as JSON (async/await variant)."""

    async def get(self):
        record = await db.world.find_one(randint(1, 10000))
        body = {
            self.ID: int(record['_id']),
            self.RANDOM_NUMBER: int(record[self.RANDOM_NUMBER]),
        }
        self.finish(json.dumps(body))
Esempio n. 57
0
def parse_command_line():
    """Define and parse command-line flags, then overlay supplied values
    onto the module-level config dicts.

    Only options the user actually provided (i.e. whose parsed value is
    not None) override the defaults already present in ``config`` and the
    redis config dicts.
    """
    options.define("port", help="run server on a specific port", type=int)
    options.define("log_console", help="print log to console", type=bool)
    options.define("log_file", help="print log to file", type=bool)
    options.define("log_file_path", help="path of log_file", type=str)
    options.define("log_level", help="level of logging", type=str)
    # At most one instance in the cluster should be master; the master is
    # generally the one that runs cluster-wide scheduled jobs.
    options.define("master",
                   help="is master node? (true:master / false:slave)",
                   type=bool)
    # sqlalchemy engine_url, e.g. for pgsql:
    # 'postgresql+psycopg2://mhq:1qaz2wsx@localhost:5432/blog'
    options.define("engine_url", help="engine_url for sqlalchemy", type=str)
    # Redis settings; these override every place redis is configured.
    options.define("redis_host", help="redis host e.g 127.0.0.1", type=str)
    options.define("redis_port", help="redis port e.g 6379", type=int)
    options.define("redis_password",
                   help="redis password set this option if has pwd ",
                   type=str)
    options.define("redis_db", help="redis db e.g 0", type=int)

    # Read the flags passed on the command line at startup.
    options.logging = None  # don't use tornado's built-in logging setup
    options.parse_command_line()

    # Overlay supplied values onto the defaults, data-driven instead of
    # one hand-written if-statement per option (option name == config key).
    for name in ("port", "log_console", "log_file", "log_file_path",
                 "log_level", "master"):
        value = getattr(options, name)
        if value is not None:
            config[name] = value
    if options.engine_url is not None:
        config['database']['engine_url'] = options.engine_url

    # Each redis option fans out to all three redis-backed subsystems.
    redis_configs = (redis_session_config, site_cache_config,
                     redis_pub_sub_config)
    for option_name, key in (("redis_host", "host"),
                             ("redis_port", "port"),
                             ("redis_password", "password"),
                             ("redis_db", "db_no")):
        value = getattr(options, option_name)
        if value is not None:
            for cfg in redis_configs:
                cfg[key] = value
Esempio n. 58
0
@gen.coroutine
def open_run_save(api, path):
    """Open a notebook, run it, and save.

    Only the original notebook is saved; the execution output is not
    recorded.
    """
    notebook = api.get_notebook(path)
    session = Session()
    kernel = yield api.new_kernel(session.session)
    try:
        yield run_notebook(notebook, kernel, session)
    finally:
        # Always tear the kernel down, even if execution failed.
        api.kill_kernel(kernel['id'])
    gen_log.info("Saving %s/notebooks/%s", api.url, path)
    api.save_notebook(notebook, path)

if __name__ == '__main__':
    enable_pretty_logging()
    options.define("url",
                   default="http://localhost:8888",
                   help="The base URL of the notebook server to test")

    leftover = options.parse_command_line()
    # Fall back to a single default notebook when none are given.
    notebook_paths = leftover if leftover else ['Untitled0.ipynb']

    api = NBAPI(url=options.url.rstrip('/'))
    loop = IOLoop.current()

    for path in notebook_paths:
        gen_log.info("Running %s/notebooks/%s", api.url, path)
        # run_sync completes inside the loop body, so binding the current
        # path as a default argument is purely defensive.
        loop.run_sync(lambda path=path: open_run_save(api, path))
Esempio n. 59
0
import tornado.gen
import tornado.ioloop
import tornado.iostream
import tornado.tcpserver
import tornado.web
from tornado.options import options

import log
import tornado_h2.http2server as th2
from tornado_h2.http2_web import HTTP2StaticFileHandler

# Application-level logger for the HTTP/2 server.
logger = logging.getLogger('tornado.application')

options.define("address",
               default='0.0.0.0',
               help="IP address to attach the server to",
               type=str)
# Bug fix: os.environ values are strings, and tornado stores the *default*
# as-is (type=int only converts command-line input), so a PORT env var
# previously yielded a str-typed options.port. Coerce the default to int.
options.define("port",
               default=int(os.environ.get('PORT', 8888)),
               help="Port number to run the server on",
               type=int)
options.define("https",
               default=True,
               help="Start application with HTTPS?",
               type=bool)
options.define("debug",
               default=True,
               help="Start application in debug mode?",
               type=bool)