def standalone():
    """
    Initializes Tornado and our application.  Forks worker processes to handle
    requests.  Does not return until all child processes exit normally.
    """
    # Parse arguments
    parser = argparse.ArgumentParser(description="Homer web server")
    parser.add_argument("--background", action="store_true",
                        help="Detach and run server in background")
    parser.add_argument("--worker-processes", default=1, type=int)
    # File descriptor of an already-listening HTTP socket.  Only set when this
    # process was spawned as a worker by the parent; None means "parent".
    parser.add_argument("--shared-http-fd", default=None, type=int)
    parser.add_argument("--process-id", default=0, type=int)
    args = parser.parse_args()

    # We don't initialize logging until we fork because we want each child to
    # have its own logging and it's awkward to reconfigure logging that is
    # defined by the parent.
    application = create_application()

    if args.background:
        # Get a new logfile, rotating the old one if present.
        err_log_name = os.path.join(settings.LOGS_DIR,
                                    settings.LOG_FILE_PREFIX + "-err.log")
        try:
            os.rename(err_log_name, err_log_name + ".old")
        except OSError:
            # Best-effort rotation: there may be no previous logfile.
            pass

        # Fork into background.
        utils.daemonize(err_log_name)

    # Drop a pidfile.
    pid = os.getpid()
    with open(settings.PID_FILE, "w") as pidfile:
        pidfile.write(str(pid) + "\n")

    utils.install_sigusr1_handler(settings.LOG_FILE_PREFIX)

    # Setup logging
    logging_config.configure_logging(args.process_id)

    # setup accumulators and counters for statistics gathering
    api.base.setupStats(args.process_id, args.worker_processes)

    # BUGFIX: test against None explicitly.  The previous truthiness test
    # (`if args.shared_http_fd:`) would misclassify a worker that was handed
    # file descriptor 0 as the parent process.
    if args.shared_http_fd is not None:
        # Worker: adopt the already-listening socket inherited from the parent.
        reactor.adoptStreamPort(args.shared_http_fd, AF_INET, application)
    else:
        # Cyclone
        _log.info("Going to listen for HTTP on port %s", settings.HTTP_PORT)
        http_port = reactor.listenTCP(settings.HTTP_PORT, application,
                                      interface=settings.LOCAL_IP)

        # Spawn worker processes (ids 1..N-1), passing each the shared
        # listening socket's fd so they can accept connections too.
        for process_id in range(1, args.worker_processes):
            reactor.spawnProcess(None, executable,
                                 [executable, __file__,
                                  "--shared-http-fd", str(http_port.fileno()),
                                  "--process-id", str(process_id)],
                                 childFDs={0: 0, 1: 1, 2: 2,
                                           http_port.fileno(): http_port.fileno()},
                                 env=os.environ)

    # Kick off the reactor to start listening on configured ports
    reactor.run()
def standalone(args):
    """Run the stress tool against the server named in *args*.

    Records the target server and request rate in module globals, wires the
    simulation and reporting callbacks onto the IOLoop, and blocks in the
    loop until it is stopped.
    """
    global io_loop
    global server_prefix
    global get_rate

    # Logging first, so everything below is captured.
    configure_logging("stresstool")
    logging.getLogger().setLevel(logging.INFO)
    logging.getLogger("crest").setLevel(logging.DEBUG)

    _log.info("Starting stress against %s", args.server)

    # Publish run parameters for the simulation callbacks to read.
    server_prefix = "http://%s" % args.server
    get_rate = args.rate

    # Schedule the traffic generator and the stats printer, then block.
    loop = IOLoop.instance()
    io_loop = loop
    loop.add_callback(simulate_xdm_gets)
    loop.add_callback(print_histograms)
    loop.start()
import logging
import atexit
from sys import argv, executable
from socket import AF_INET

import cyclone.options
import cyclone.web
from telephus.protocol import ManagedCassandraClientFactory
from twisted.internet import ssl, reactor

from metaswitch.crest import api
from metaswitch.crest import settings, logging_config
from metaswitch.common import utils

# Module-level logger for the crest package.
_log = logging.getLogger("crest")

# Configure logging for process id 0 (the parent) at import time; worker
# processes reconfigure with their own id after forking.
logging_config.configure_logging(0)

def create_application():
    """Build the Cyclone web application backed by Cassandra.

    NOTE(review): this copy of the function stops at the "Initialize all
    modules" comment and appears truncated relative to the sibling copy later
    in this file, which goes on to call api.initialize(application) and
    return the application.  Confirm which copy is authoritative.
    """
    # Tornado/Cyclone application settings.
    app_settings = {
        "gzip": True,
        "cookie_secret": settings.COOKIE_SECRET,
        "debug": settings.CYCLONE_DEBUG,
    }
    application = cyclone.web.Application(api.get_routes(), **app_settings)
    # Attach a managed Cassandra client factory and start connecting it to
    # the configured Cassandra node.
    application.cassandra_factory = ManagedCassandraClientFactory(
        settings.CASS_KEYSPACE)
    reactor.connectTCP(settings.CASS_HOST, settings.CASS_PORT,
                       application.cassandra_factory)
    # Initialize all modules
# For each entry in the SIP_DIGESTS table, create entries in the # public_ids and private_ids tables that contain the mapping # private_id:<xxx> - public_id:<sip:xxx> - this is what earlier versions # of Clearwater simulated but did not store in the database. c.execute("SELECT private_id from %s;" % config.SIP_DIGESTS_TABLE) private_ids = [] while True: row = c.fetchone() if row == None: break private_ids.append(row[0]) print ("List of private IDs: %s" % private_ids) for priv in private_ids: pub = "sip:" + priv print ("Inserting private/public ID pair: %s/%s" % (priv, pub)) try: c.execute("INSERT INTO %s (public_id, '%s') values ('%s', '%s');" % (config.PRIVATE_IDS_TABLE, priv, pub, priv)) c.execute("INSERT INTO %s (private_id, '%s') values ('%s', '%s');" % (config.PUBLIC_IDS_TABLE, pub, priv, pub)) except Exception: _log.exception("Failed to insert private/public ID pair: %s/%s" % (priv, pub)) pass print "Done." c.close() if __name__ == '__main__': logging_config.configure_logging("upgrade_homestead_db") standalone()
# "OpenSSL Licenses" means the OpenSSL License and Original SSLeay License # under which the OpenSSL Project distributes the OpenSSL toolkit software, # as those licenses appear in the file LICENSE-OPENSSL. import logging from metaswitch.crest import logging_config from metaswitch.crest.tools import connection from metaswitch.crest.api import get_create_statements _log = logging.getLogger("crest.create_db") def standalone(): c = connection.cursor() create_statements = get_create_statements() print "Create statements: ", create_statements for cs in create_statements: try: print "executing %s" % cs c.execute(cs) except Exception: _log.exception("Failed to create table") pass print "Done." c.close() if __name__ == '__main__': logging_config.configure_logging("create_db") standalone()
import logging
import atexit
from sys import argv, executable
from socket import AF_INET

import cyclone.options
import cyclone.web
from telephus.protocol import ManagedCassandraClientFactory
from twisted.internet import ssl, reactor

from metaswitch.crest import api
from metaswitch.crest import settings, logging_config
from metaswitch.common import utils

_log = logging.getLogger("crest")

# Configure logging for process 0 (the parent) as soon as the module loads.
logging_config.configure_logging(0)

def create_application():
    """Build and return the Cyclone web application.

    Wires the URL routes from the API layer into a Cyclone application,
    attaches a managed Cassandra client factory, starts the TCP connection
    to the configured Cassandra node, and lets every API module initialize
    itself against the application before returning it.
    """
    app = cyclone.web.Application(
        api.get_routes(),
        gzip=True,
        cookie_secret=settings.COOKIE_SECRET,
        debug=settings.CYCLONE_DEBUG,
    )

    # One managed Cassandra connection shared by all request handlers.
    factory = ManagedCassandraClientFactory(settings.CASS_KEYSPACE)
    app.cassandra_factory = factory
    reactor.connectTCP(settings.CASS_HOST, settings.CASS_PORT, factory)

    # Initialize all modules
    api.initialize(app)
    return app
# under which the OpenSSL Project distributes the OpenSSL toolkit software, # as those licenses appear in the file LICENSE-OPENSSL. import logging from metaswitch.crest import logging_config from metaswitch.crest.tools import connection from metaswitch.crest.api import get_create_statements _log = logging.getLogger("crest.create_db") def standalone(): c = connection.cursor() create_statements = get_create_statements() print "Create statements: ", create_statements for cs in create_statements: try: print "executing %s" % cs c.execute(cs) except Exception: _log.exception("Failed to create table") pass print "Done." c.close() if __name__ == '__main__': logging_config.configure_logging("create_db") standalone()