def main():
    """Entry point for the beakerd scheduling daemon.

    Parses command-line options, loads the server configuration, installs
    termination signal handlers, daemonizes unless --foreground was given,
    and then enters the main scheduling loop.
    """
    global opts
    parser = get_parser()
    opts, args = parser.parse_args()
    load_config(opts.configfile)
    signal.signal(signal.SIGINT, sigterm_handler)
    signal.signal(signal.SIGTERM, sigterm_handler)
    if opts.foreground:
        # Foreground mode: log to the terminal and stay attached to it.
        # (Previously this function detached and acquired the pid file even
        # in foreground mode, defeating the purpose of the flag; the
        # daemonization now only happens in the else branch, matching the
        # --foreground handling used elsewhere in beakerd.)
        log_to_stream(sys.stderr, level=logging.DEBUG)
    else:
        log_to_syslog('beakerd')
        pid_file = opts.pid_file
        if pid_file is None:
            pid_file = config.get("PID_FILE", "/var/run/beaker/beakerd.pid")
        # acquire_timeout=0: fail immediately if another beakerd already
        # holds the pid file lock, rather than blocking.
        d = daemon.DaemonContext(
                pidfile=pidfile.TimeoutPIDLockFile(pid_file, acquire_timeout=0),
                detach_process=True)
        try:
            d.open()
        except pidlockfile.AlreadyLocked:
            log.fatal("could not acquire lock on %s, exiting" % pid_file)
            sys.stderr.write("could not acquire lock on %s" % pid_file)
            sys.exit(1)
    schedule()
def setup_package():
    """Package-level test setup.

    Verifies that the test configuration file exists, loads it, mirrors
    DEBUG-level logging to stdout, applies the sqlite BEGIN workaround,
    and then builds the model.
    """
    cfg = _config_file
    assert os.path.exists(cfg), 'Config file %s must exist' % cfg
    load_config(configfile=cfg)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    workaround_sqlite_begin()
    data_setup.setup_model()
def setup_package():
    """Package-level test setup: load the configuration, route DEBUG
    logging to stdout, then create the full database schema."""
    path = _config_file
    assert os.path.exists(path), 'Config file %s must exist' % path
    load_config(configfile=path)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    get_engine()
    metadata.create_all()
def main():
    """Command-line entry point: parse options, configure the application,
    and import product data from the given XML file."""
    options, _args = get_parser().parse_args()
    load_config(options.configfile)
    log_to_stream(sys.stderr)
    update_products(options.productfile)
def main():
    """Command-line entry point: load configuration, enable stderr logging,
    and initialize the database with the given admin user details."""
    options, _args = get_parser().parse_args()
    load_config(options.configfile)
    log_to_stream(sys.stderr)
    init_db(user_name=options.user_name,
            password=options.password,
            user_display_name=options.display_name,
            user_email_address=options.email_address)
def reload_config():
    """Re-read the configuration file and restart the interface.

    Stops the interface if it is running, reloads config, then starts it
    again with the fresh configuration.
    """
    # Drop existing log handlers first so reloading does not stack
    # duplicates on top of them.
    purge_handlers()
    if interface.running:
        interface.stop()
    cfg_path = opts.configfile
    load_config(cfg_path)
    interface.start(config)
def setup_package():
    # Integration-test package setup: load config, populate baseline test
    # data (unless BEAKER_SKIP_INIT_DB is set), and start the supporting
    # server processes (gunicorn unless an external server is given, slapd).
    assert os.path.exists(CONFIG_FILE), 'Config file %s must exist' % CONFIG_FILE
    load_config(configfile=CONFIG_FILE)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    from bkr.inttest import data_setup
    if not 'BEAKER_SKIP_INIT_DB' in os.environ:
        data_setup.setup_model()
        with session.begin():
            data_setup.create_labcontroller() #always need a labcontroller
            data_setup.create_task(name=u'/distribution/install', requires=
                    u'make gcc nfs-utils wget procmail redhat-lsb ntp '
                    u'@development-tools @development-libs @development '
                    u'@desktop-platform-devel @server-platform-devel '
                    u'libxml2-python expect pyOpenSSL'.split())
            data_setup.create_task(name=u'/distribution/reservesys',
                    requires=u'emacs vim-enhanced unifdef sendmail'.split())
            data_setup.create_task(name=u'/distribution/utils/dummy')
            data_setup.create_task(name=u'/distribution/inventory')
            data_setup.create_distro()
    # Task RPM directory must exist before the server starts serving it.
    if not os.path.exists(turbogears.config.get('basepath.rpms')):
        os.mkdir(turbogears.config.get('basepath.rpms'))
    setup_slapd()
    turbogears.testutil.make_app(Root)
    turbogears.testutil.start_server()
    # `processes` is module-global so teardown code elsewhere can stop them.
    global processes
    processes = []
    if 'BEAKER_SERVER_BASE_URL' not in os.environ:
        # need to start the server ourselves
        # Usual pkg_resources ugliness is needed to ensure gunicorn doesn't
        # import pkg_resources before we get a chance to specify our
        # requirements in bkr.server.wsgi
        processes.extend([
            Process('gunicorn', args=[sys.executable, '-c',
                '__requires__ = ["CherryPy < 3.0"]; import pkg_resources; ' \
                'from gunicorn.app.wsgiapp import run; run()',
                '--bind', ':%s' % turbogears.config.get('server.socket_port'),
                '--workers', '8', '--access-logfile', '-', '--preload',
                'bkr.server.wsgi:application'],
                listen_port=turbogears.config.get('server.socket_port')),
        ])
    # slapd always runs locally on port 3899; SIGINT is its clean-stop signal.
    processes.extend([
        Process('slapd', args=['slapd', '-d0', '-F' + slapd_config_dir,
                '-hldap://127.0.0.1:3899/'],
                listen_port=3899, stop_signal=signal.SIGINT),
    ])
    try:
        for process in processes:
            process.start()
    except:
        # If any process fails to start, stop them all and re-raise so the
        # test run aborts instead of limping along half-started.
        for process in processes:
            process.stop()
        raise
def main():
    """Entry point: sync harness repos from a remote base URL into basepath."""
    parser = get_parser()
    opts, args = parser.parse_args()
    configfile = opts.configfile
    baseurl = opts.baseurl
    # The base URL always names a directory, so ensure it carries a trailing
    # slash; otherwise relative URL joining drops the final path component.
    # (Matches the normalization done by the newer variant of this command.)
    if not baseurl.endswith('/'):
        baseurl += '/'
    load_config(configfile)
    log_to_stream(sys.stderr)
    if opts.basepath:
        basepath = opts.basepath
    else:
        basepath = get("basepath.harness", "/var/www/beaker/harness")
    update_repos(baseurl=baseurl, basepath=basepath)
def start():
    """Start the CherryPy application server.

    An optional configuration-file path may be supplied as the first
    command-line argument; otherwise the default configuration is loaded.
    """
    if len(sys.argv) > 1:
        load_config(sys.argv[1])
    else:
        load_config()
    # If rlimit_as is defined in the config file then cap the process
    # address space accordingly (soft and hard limits set to the same value).
    rlimit_as = turbogears.config.get('rlimit_as')
    if rlimit_as:
        resource.setrlimit(resource.RLIMIT_AS, (rlimit_as, rlimit_as))
    from bkr.server.controllers import Root
    turbogears.start_server(Root())
def main():
    """Purge visit-tracking records which expired more than --keep days ago."""
    parser = OptionParser('usage: %prog [options]',
            description=__description__,
            version=__version__)
    parser.add_option('-c', '--config', metavar='FILENAME',
            help='Read configuration from FILENAME')
    # type='int' is required here: without it optparse returns the
    # command-line value as a *string*, whereas the default below is the
    # int 7, so downstream day arithmetic would break for CLI-given values.
    parser.add_option('-k', '--keep', metavar='DAYS', type='int',
            help='Keep records which expired less than DAYS ago [default: %default]')
    parser.set_defaults(keep=7)
    options, args = parser.parse_args()
    from bkr.server.util import load_config, log_to_stream
    load_config(options.config)
    log_to_stream(sys.stderr)
    cleanup_visits(options.keep)
def main(): current_date = None parser = get_parser() opts, args = parser.parse_args() threshold = opts.threshold reservation_type = opts.reservation_type.decode(sys.stdin.encoding or "utf8") testing = opts.testing configfile = opts.configfile load_config(configfile) log_to_stream(sys.stderr) interface.start(config) get_engine() if testing: print "Dry run only, nothing will be sent\n" identify_nags(threshold, reservation_type, testing)
def main():
    """Entry point: sync harness repos into basepath, exiting with the
    status returned by update_repos()."""
    options, _args = get_parser().parse_args()
    # The base URL is always a directory with a trailing slash
    url = options.baseurl
    if not url.endswith('/'):
        url = url + '/'
    load_config(options.configfile)
    level = logging.DEBUG if options.debug else logging.WARNING
    log_to_stream(sys.stderr, level=level)
    path = options.basepath or get("basepath.harness", "/var/www/beaker/harness")
    sys.exit(update_repos(baseurl=url, basepath=path))
def __init__(self, remote):
    """Initialize the task updater for the given remote server.

    Loads configuration, prepares logging, verifies permissions, and
    creates a server proxy for `remote`.
    """
    # Load configuration data FIRST: the basepath.rpms lookup below reads
    # from the config, so calling load_config() at the end (as before)
    # meant task_dir was resolved before the config was actually loaded.
    # The sibling variants of this initializer load config up front.
    load_config()
    # setup, sanity checks
    self.task_dir = turbogears.config.get("basepath.rpms", "/var/www/beaker/rpms")
    self._setup_logging()
    self._check_perms()
    # Initialize core attributes
    self.remote = remote
    remote_proxy = self._get_server_proxy(self.remote)
    self.proxy = {'remote': remote_proxy, }
    self.tasks_added = []
    self.t_downloaded = 0
    self.tasks = Tasks()
def __init__(self, remote=None):
    """Initialize the task library client.

    :param remote: optional base URL of the remote server; when given,
        an XML-RPC proxy is created against its /RPC2 endpoint.
    """
    # Configuration must be loaded before any config lookups below.
    load_config()
    # setup, sanity checks
    self.task_dir = turbogears.config.get("basepath.rpms")
    self._setup_logging()
    # Initialize core attributes
    if remote:
        endpoint = remote.rstrip("/")
        self.remote = endpoint
        self.proxy = xmlrpclib.ServerProxy(endpoint + '/RPC2')
    self.tasks_added = []
    self.t_downloaded = 0
    self.tasklib = TaskLibrary()
def __init__(self, remote=None):
    """Set up the task library client, optionally pointed at a remote server.

    When `remote` is given its trailing slashes are stripped and an XML-RPC
    proxy is created for its /RPC2 endpoint.
    """
    # Load configuration before reading basepath.rpms from it.
    load_config()
    # setup, sanity checks
    self.task_dir = turbogears.config.get("basepath.rpms",
            "/var/www/beaker/rpms")
    self._setup_logging()
    # Initialize core attributes
    if remote:
        self.remote = remote.rstrip("/")
        self.proxy = xmlrpclib.ServerProxy(self.remote + '/RPC2')
    self.tasks_added = []
    self.t_downloaded = 0
    self.tasklib = TaskLibrary()
def main():
    # Entry point for the beakerd scheduling daemon (newer variant with
    # SIGHUP-triggered config reload and optional foreground mode).
    global opts
    parser = get_parser()
    opts, args = parser.parse_args()
    # First look on the command line for a desired config file,
    # if it's not on the command line, then look for 'setup.py'
    # in the current directory. If there, load configuration
    # from a file called 'dev.cfg'. If it's not there, the project
    # is probably installed and we'll look first for a file called
    # 'prod.cfg' in the current directory and then for a default
    # config file called 'default.cfg' packaged in the egg.
    load_config(opts.configfile)
    signal.signal(signal.SIGHUP, sighup_handler)
    signal.signal(signal.SIGINT, sigterm_handler)
    signal.signal(signal.SIGTERM, sigterm_handler)
    if not opts.foreground:
        log.debug("Launching beakerd daemon")
        pid_file = opts.pid_file
        if pid_file is None:
            pid_file = config.get("PID_FILE", "/var/run/beaker/beakerd.pid")
        # acquire_timeout=0: fail immediately if another instance already
        # holds the pid file lock.
        d = daemon.DaemonContext(pidfile=pidfile.TimeoutPIDLockFile(
                pid_file, acquire_timeout=0),)
        # Silence bkr.server.util and drop log handlers before detaching,
        # so nothing writes to the soon-to-be-closed terminal fds.
        util_logger = logging.getLogger('bkr.server.util')
        util_logger.disabled = True
        purge_handlers()
        try:
            d.open()
        except pidlockfile.AlreadyLocked:
            # Logging was purged above; reload_config() restores handlers
            # so the fatal message is actually recorded before exiting.
            reload_config() # reopen logfiles
            log.fatal("could not acquire lock on %s, exiting" % pid_file)
            sys.stderr.write("could not acquire lock on %s" % pid_file)
            sys.exit(1)
        else:
            # Re-register handlers after daemonizing -- presumably because
            # DaemonContext.open() resets signal dispositions (TODO confirm
            # against the python-daemon version in use).
            signal.signal(signal.SIGHUP, sighup_handler)
            signal.signal(signal.SIGTERM, sigterm_handler)
            signal.signal(signal.SIGINT, sigterm_handler)
    schedule()
def main():
    """Refresh the LDAP cache, exiting quietly when LDAP is disabled.

    Returns 0 in all successful paths; beaker-server ships a cron job that
    invokes this command.
    """
    option_parser = optparse.OptionParser('usage: %prog [options]',
            description=__description__, version=__version__)
    option_parser.add_option('-c', '--config', metavar='FILENAME',
            help='Read configuration from FILENAME')
    option_parser.add_option('--debug', action='store_true',
            help='Print debugging messages to stderr')
    option_parser.set_defaults(debug=False)
    options, leftover = option_parser.parse_args()
    load_config(options.config)
    verbosity = logging.DEBUG if options.debug else logging.WARNING
    log_to_stream(sys.stderr, level=verbosity)
    # beaker-server ships a cron job to run this command, so exit silently
    # and quickly if LDAP is not actually enabled.
    if not config.get('identity.ldap.enabled', False):
        return 0
    refresh_ldap()
    return 0
def main(argv=None):
    """Command-line entry point for permanently deleting expired job logs.

    :param argv: optional argument list (defaults to sys.argv[1:]).
    :returns: exit status from log_delete().
    """
    option_parser = OptionParser('usage: %prog [options]',
            description='Permanently deletes log files from Beaker and/or '
            'archive server',
            version=__version__)
    option_parser.add_option('-c', '--config', metavar='FILENAME',
            help='Read configuration from FILENAME')
    option_parser.add_option('-v', '--verbose', action='store_true',
            help='Print the path/URL of deleted files to stdout')
    option_parser.add_option('--debug', action='store_true',
            help='Print debugging messages to stderr')
    option_parser.add_option('--dry-run', action='store_true',
            help='Do not delete any files, and issue ROLLBACK instead of '
            'COMMIT after performing database operations')
    option_parser.add_option('--limit', default=None, type='int',
            help='Set a limit on the number of jobs whose logs will be deleted')
    option_parser.set_defaults(verbose=False, debug=False, dry_run=False)
    options, leftover = option_parser.parse_args(argv)
    load_config(options.config)
    # urllib3 installs a NullHandler, we can just remove it and let the messages propagate
    logging.getLogger('requests.packages.urllib3').handlers[:] = []
    verbosity = logging.DEBUG if options.debug else logging.WARNING
    log_to_stream(sys.stderr, level=verbosity)
    return log_delete(options.verbose, options.dry_run, options.limit)
from turbogears import config
from turbogears.database import session
import cherrypy
import cherrypy._cpwsgi
from cherrypy.filters.basefilter import BaseFilter
from flask import Flask
from bkr.common import __version__
from bkr.server import identity, assets
from bkr.server.app import app

log = logging.getLogger(__name__)

# Load config.
from bkr.log import log_to_stream
from bkr.server.util import load_config
load_config()
log_to_stream(sys.stderr, level=logging.DEBUG)

# Keep the code before the imports, otherwise we'll end up with function names
# not marked as executed (see: Coverage.py FAQ)
if config.get('coverage', False):
    import coverage
    # atexit is presumably used further below (outside this view) to save
    # coverage data on interpreter exit -- TODO confirm.
    import atexit
    log.debug('Starting coverage analysis')
    # data_suffix=True gives each process its own data file so parallel runs
    # don't clobber each other; .kid templates are excluded from measurement.
    cov = coverage.coverage(data_suffix=True, cover_pylib=False, timid=True,
            omit=['*.kid'])
    cov.start()
def setup_package():
    # Integration-test package setup: load config, populate baseline test
    # data (unless BEAKER_SKIP_INIT_DB is set), and start the supporting
    # server processes (gunicorn unless an external server is given, slapd).
    assert os.path.exists(
        CONFIG_FILE), 'Config file %s must exist' % CONFIG_FILE
    load_config(configfile=CONFIG_FILE)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    from bkr.inttest import data_setup
    if not 'BEAKER_SKIP_INIT_DB' in os.environ:
        data_setup.setup_model()
        with session.begin():
            data_setup.create_labcontroller() #always need a labcontroller
            data_setup.create_task(
                name=u'/distribution/install',
                requires=u'make gcc nfs-utils wget procmail redhat-lsb ntp '
                    u'@development-tools @development-libs @development '
                    u'@desktop-platform-devel @server-platform-devel '
                    u'libxml2-python expect pyOpenSSL'.split())
            data_setup.create_task(
                name=u'/distribution/reservesys',
                requires=u'emacs vim-enhanced unifdef sendmail'.split())
            data_setup.create_task(name=u'/distribution/utils/dummy')
            data_setup.create_task(name=u'/distribution/inventory')
            data_setup.create_distro()
    # Task RPM directory must exist before the server starts serving it.
    if not os.path.exists(turbogears.config.get('basepath.rpms')):
        os.mkdir(turbogears.config.get('basepath.rpms'))
    setup_slapd()
    turbogears.testutil.make_app(Root)
    turbogears.testutil.start_server()
    # `processes` is module-global so teardown code elsewhere can stop them.
    global processes
    processes = []
    if 'BEAKER_SERVER_BASE_URL' not in os.environ:
        # need to start the server ourselves
        # Usual pkg_resources ugliness is needed to ensure gunicorn doesn't
        # import pkg_resources before we get a chance to specify our
        # requirements in bkr.server.wsgi
        processes.extend([
            Process('gunicorn', args=[sys.executable, '-c',
                '__requires__ = ["CherryPy < 3.0"]; import pkg_resources; ' \
                'from gunicorn.app.wsgiapp import run; run()',
                '--bind', ':%s' % turbogears.config.get('server.socket_port'),
                '--workers', '8', '--access-logfile', '-', '--preload',
                'bkr.server.wsgi:application'],
                listen_port=turbogears.config.get('server.socket_port')),
        ])
    # slapd always runs locally on port 3899; SIGINT is its clean-stop signal.
    processes.extend([
        Process('slapd', args=[
            'slapd', '-d0', '-F' + slapd_config_dir,
            '-hldap://127.0.0.1:3899/'
        ], listen_port=3899, stop_signal=signal.SIGINT),
    ])
    try:
        for process in processes:
            process.start()
    except:
        # If any process fails to start, stop them all and re-raise so the
        # test run aborts instead of limping along half-started.
        for process in processes:
            process.stop()
        raise
def setup_package():
    # Integration-test package setup (idempotent variant): loads config,
    # populates the minimum baseline data only when absent, cleans up task
    # RPMs from earlier runs, and starts supporting processes.
    assert os.path.exists(
        CONFIG_FILE), 'Config file %s must exist' % CONFIG_FILE
    load_config(configfile=CONFIG_FILE)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    from bkr.inttest import data_setup
    if not 'BEAKER_SKIP_INIT_DB' in os.environ:
        data_setup.setup_model()
        with session.begin():
            # Fill in the bare minimum data which Beaker assumes will always be present.
            # Note that this can be called multiple times (for example, the
            # beaker-server-redhat add-on package reuses this setup function).
            if not LabController.query.count():
                data_setup.create_labcontroller()
            if not Task.query.count():
                data_setup.create_task(
                    name=u'/distribution/install',
                    requires=u'make gcc nfs-utils wget procmail redhat-lsb ntp '
                        u'@development-tools @development-libs @development '
                        u'@desktop-platform-devel @server-platform-devel '
                        u'libxml2-python expect pyOpenSSL'.split())
                data_setup.create_task(name=u'/distribution/check-install')
                data_setup.create_task(
                    name=u'/distribution/reservesys',
                    requires=u'emacs vim-enhanced unifdef sendmail'.split())
                data_setup.create_task(name=u'/distribution/utils/dummy')
                data_setup.create_task(name=u'/distribution/inventory')
            if not Distro.query.count():
                # The 'BlueShoeLinux5-5' string appears in many tests, because it's
                # the distro name used in complete-job.xml.
                data_setup.create_distro_tree(osmajor=u'BlueShoeLinux5',
                        distro_name=u'BlueShoeLinux5-5')
    if os.path.exists(turbogears.config.get('basepath.rpms')):
        # Remove any task RPMs left behind by previous test runs
        for entry in os.listdir(turbogears.config.get('basepath.rpms')):
            shutil.rmtree(os.path.join(turbogears.config.get('basepath.rpms'),
                    entry), ignore_errors=True)
    else:
        os.mkdir(turbogears.config.get('basepath.rpms'))
    setup_slapd()
    # Capture outgoing mail so tests can assert on it.
    mail_capture_thread.start()
    if turbogears.config.get('openstack.identity_api_url'):
        setup_openstack()
    turbogears.testutil.make_app(Root)
    turbogears.testutil.start_server()
    # `processes` is module-global so teardown code elsewhere can stop them.
    global processes
    processes = []
    if 'BEAKER_SERVER_BASE_URL' not in os.environ:
        # need to start the server ourselves
        # Usual pkg_resources ugliness is needed to ensure gunicorn doesn't
        # import pkg_resources before we get a chance to specify our
        # requirements in bkr.server.wsgi
        processes.extend([
            Process('gunicorn', args=[sys.executable, '-c',
                '__requires__ = ["CherryPy < 3.0"]; import pkg_resources; ' \
                'from gunicorn.app.wsgiapp import run; run()',
                '--bind', ':%s' % turbogears.config.get('server.socket_port'),
                '--workers', '8', '--access-logfile', '-', '--preload',
                'bkr.server.wsgi:application'],
                listen_port=turbogears.config.get('server.socket_port')),
        ])
    # slapd always runs locally on port 3899; SIGINT is its clean-stop signal.
    processes.extend([
        Process('slapd', args=[
            'slapd', '-d0', '-F' + slapd_config_dir,
            '-hldap://127.0.0.1:3899/'
        ], listen_port=3899, stop_signal=signal.SIGINT),
    ])
    try:
        for process in processes:
            process.start()
    except:
        # Roll back: stop everything that may have started, then re-raise.
        for process in processes:
            process.stop()
        raise
import logging
from turbogears import config
from turbogears.database import session
import cherrypy
import cherrypy._cpwsgi
from cherrypy.filters.basefilter import BaseFilter
from flask import Flask
from bkr.server import identity, assets
from bkr.server.app import app

log = logging.getLogger(__name__)

# Load config.
from bkr.log import log_to_stream
from bkr.server.util import load_config
load_config()
log_to_stream(sys.stderr, level=logging.DEBUG)

# WSGI entry point: servers look up the `application` callable by name.
application = app

# Register all routes. Importing these modules has the side effect of
# attaching their handlers to the Flask app.
import bkr.server.activity
import bkr.server.user
import bkr.server.group
import bkr.server.systems
import bkr.server.reserve_workflow
import bkr.server.pools
import bkr.server.jobs
import bkr.server.recipes

@app.before_first_request
def main(*args):
    # Regenerate and print a kickstart, either from an existing recipe
    # (-r) or from a distro tree + system pair (-d and -f). Options may
    # also override kickstart metadata (-m) and post kernel options (-p).
    parser = optparse.OptionParser('usage: %prog [options]',
            description=__description__, version=__version__)
    parser.add_option('-u', '--user', metavar='USERNAME',
            help='The user we are creating a kickstart for',
            default='admin')
    parser.add_option('-r', '--recipe-id', metavar='ID',
            help='Recreate kickstart based on recipe ID')
    parser.add_option('-d', '--distro-tree-id', metavar='ID',
            help='Recreate kickstart based on distro ID')
    parser.add_option('-t', '--template-dir', metavar='DIR',
            help='Retrieve templates from DIR')
    parser.add_option('-f', '--system', metavar='FQDN',
            help='Generate kickstart for system identified by FQDN')
    parser.add_option('-m', '--ks-meta', metavar='OPTIONS',
            help='Kickstart meta data')
    parser.add_option('-p', '--kernel-options-post', metavar='OPTIONS',
            help='Kernel options post')
    options, args = parser.parse_args(*args)
    ks_meta = options.ks_meta
    koptions_post = options.kernel_options_post
    template_dir = options.template_dir
    if template_dir:
        add_to_template_searchpath(template_dir)
    # Without a recipe, both a distro tree and a system are required.
    if not options.recipe_id:
        if not options.distro_tree_id and not options.system:
            parser.error('Must specify either a recipe or a distro tree and system')
        elif not options.distro_tree_id:
            parser.error('Must specify a distro tree id when passing in a system')
        elif not options.system:
            parser.error('Must specify a system when not specifying a recipe')
    load_config()
    with session.begin():
        user = User.by_user_name(options.user)
        ks_appends = None
        recipe = None
        distro_tree = None
        system = None
        install_options = None
        if options.distro_tree_id:
            try:
                distro_tree = DistroTree.by_id(options.distro_tree_id)
            except NoResultFound:
                raise RuntimeError("Distro tree id '%s' does not exist"
                        % options.distro_tree_id)
        if options.system:
            fqdn = options.system
            try:
                system = System.by_fqdn(fqdn, user)
            except NoResultFound:
                raise RuntimeError("System '%s' does not exist" % fqdn)
            # Distro-tree + system mode: combine the system's install
            # options with any CLI-supplied ks_meta / post kernel options.
            if distro_tree and not options.recipe_id:
                install_options = system.install_options(distro_tree).combined_with(
                        InstallOptions.from_strings(ks_meta, None, koptions_post))
        if options.recipe_id:
            try:
                recipe = Recipe.by_id(options.recipe_id)
            except NoResultFound:
                raise RuntimeError("Recipe id '%s' does not exist"
                        % options.recipe_id)
            if not recipe.resource and not options.system:
                raise RuntimeError('Recipe must have (or had) a resource'
                        ' assigned to it')
            # Fall back to the recipe's own system/distro tree when not
            # explicitly overridden on the command line.
            if not system:
                system = getattr(recipe.resource, 'system', None)
            if not distro_tree:
                distro_tree = recipe.distro_tree
            # Layer install options: system defaults, then recipe-generated,
            # then the recipe's own strings, then CLI overrides (last wins).
            install_options = system.install_options(distro_tree)\
                    .combined_with(recipe.generated_install_options())\
                    .combined_with(InstallOptions.from_strings(recipe.ks_meta,
                        recipe.kernel_options, recipe.kernel_options_post))\
                    .combined_with(InstallOptions.from_strings(ks_meta,
                        None, koptions_post))
            ks_appends = [ks_append.ks_append for ks_append \
                    in recipe.ks_appends]
            # Render as the job owner, not the CLI-supplied user.
            user = recipe.recipeset.job.owner
        # Render the kickstart
        rendered_kickstart = generate_kickstart(install_options,
                distro_tree=distro_tree, system=system, user=user,
                recipe=recipe, ks_appends=ks_appends)
        kickstart = rendered_kickstart.kickstart
        print kickstart
def setup_package():
    """One-time package setup for the test suite.

    Requires the configuration file to exist on disk; loads it, sends
    DEBUG logging to stdout, connects the engine and creates all tables.
    """
    assert os.path.exists(_config_file), \
            "Config file %s must exist" % _config_file
    load_config(configfile=_config_file)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    get_engine()
    metadata.create_all()