def main():
    """Entry point for the beakerd scheduling daemon.

    Parses options, loads the server config, installs signal handlers,
    configures logging, acquires a PID lock file inside a daemon context
    and then enters the scheduling loop.
    """
    global opts
    parser = get_parser()
    opts, args = parser.parse_args()
    load_config_or_exit(opts.configfile)
    # Both INT and TERM trigger the same clean-shutdown handler.
    signal.signal(signal.SIGINT, sigterm_handler)
    signal.signal(signal.SIGTERM, sigterm_handler)
    if opts.foreground:
        log_to_stream(sys.stderr, level=logging.DEBUG)
    else:
        log_to_syslog('beakerd')
    pid_file = opts.pid_file
    if pid_file is None:
        pid_file = config.get("PID_FILE", "/var/run/beaker/beakerd.pid")
    # NOTE(review): as written, the daemon context is entered even with
    # --foreground; only the logging destination differs — confirm intended.
    d = daemon.DaemonContext(
            pidfile=pidfile.TimeoutPIDLockFile(pid_file, acquire_timeout=0),
            detach_process=True)
    try:
        d.open()
    except pidlockfile.AlreadyLocked:
        # Another beakerd already holds the lock; bail out loudly.
        log.fatal("could not acquire lock on %s, exiting" % pid_file)
        sys.stderr.write("could not acquire lock on %s" % pid_file)
        sys.exit(1)
    schedule()
def setup_package():
    """Package-level test fixture: load config, enable debug logging and
    build the model schema (with the sqlite BEGIN workaround applied)."""
    assert os.path.exists(_config_file), 'Config file %s must exist' % _config_file
    load_config(configfile=_config_file)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    workaround_sqlite_begin()
    data_setup.setup_model()
def main():
    """Entry point for beaker-init.

    Runs ``doit`` in the foreground, or daemonizes with a PID lock file
    when --background is given. Returns a process exit status.
    """
    parser = get_parser()
    opts, args = parser.parse_args()
    load_config_or_exit(opts.configfile)
    if opts.check and opts.background:
        parser.error('--check --background makes no sense, how will you know the result?')
    if not opts.background:
        log_to_stream(sys.stderr, level=logging.DEBUG if opts.debug else logging.WARNING)
        return doit(opts)
    else:
        pidlockfile = PIDLockFile(PIDFILE)
        existing_pid = pidlockfile.read_pid()
        if existing_pid:
            if process_is_alive(existing_pid):
                sys.stderr.write('Another beaker-init process is running (pid %s)\n'
                        % existing_pid)
                return 1
            else:
                # Stale lock left by a dead process: clean it up and continue.
                sys.stderr.write('Pid file %s exists but pid %s is dead, '
                        'removing the pid file\n' % (PIDFILE, existing_pid))
                pidlockfile.break_lock()
        with daemon.DaemonContext(pidfile=pidlockfile, detach_process=True):
            log_to_syslog('beaker-init')
            return doit(opts)
def main():
    """Load product data into Beaker from a local XML file or a URL.

    The URL response may be XML or JSON; anything else raises ValueError.
    """
    parser = get_parser()
    opts, args = parser.parse_args()
    if not opts.productfile and not opts.producturl:
        parser.error('Specify product data to load using --product-file or --product-url')
    load_config_or_exit(opts.configfile)
    log_to_stream(sys.stderr)
    if opts.productfile:
        # Use a context manager so the file is closed even on error
        # (the original leaked the handle).
        with open(opts.productfile, 'rb') as xml_file:
            update_products_from_xml(xml_file)
    elif opts.producturl:
        response = requests.get(opts.producturl, stream=True,
                headers=dict(Accept='application/%s' % opts.producturl_header))
        response.raise_for_status()
        # Strip any charset/options from the Content-Type header.
        mimetype, options = cgi.parse_header(response.headers['Content-Type'])
        if mimetype in ['text/xml', 'application/xml']:
            update_products_from_xml(response.raw)
        elif mimetype == 'application/json':
            update_products_from_json(response.raw)
        else:
            raise ValueError('Resource at %s is %s, should be XML or JSON'
                    % (opts.producturl, mimetype))
def main(argv=None):
    """Entry point for the log-deletion tool.

    :param argv: argument list for testing; defaults to sys.argv[1:]
    :returns: exit status from log_delete()
    """
    parser = OptionParser('usage: %prog [options]',
            description='Permanently deletes log files from Beaker and/or '
                        'archive server',
            version=__version__)
    parser.add_option('-c', '--config', metavar='FILENAME',
            help='Read configuration from FILENAME')
    parser.add_option('-v', '--verbose', action='store_true',
            help='Print the path/URL of deleted files to stdout')
    parser.add_option('--debug', action='store_true',
            help='Print debugging messages to stderr')
    parser.add_option('--dry-run', action='store_true',
            help='Do not delete any files, and issue ROLLBACK instead of '
                 'COMMIT after performing database operations')
    parser.add_option('--limit', default=None, type='int',
            help='Set a limit on the number of jobs whose logs will be deleted')
    parser.set_defaults(verbose=False, debug=False, dry_run=False)
    options, args = parser.parse_args(argv)
    load_config_or_exit(options.config)
    # urllib3 installs a NullHandler, we can just remove it and let the messages propagate
    logging.getLogger('requests.packages.urllib3').handlers[:] = []
    log_to_stream(sys.stderr,
            level=logging.DEBUG if options.debug else logging.WARNING)
    return log_delete(options.verbose, options.dry_run, options.limit)
def main():
    """Entry point for the beaker-watchdog lab controller daemon."""
    parser = OptionParser()
    parser.add_option("-c", "--config", help="Full path to config file to use")
    parser.add_option("-f", "--foreground", default=False, action="store_true",
            help="run in foreground (do not spawn a daemon)")
    parser.add_option("-p", "--pid-file", help="specify a pid file")
    (opts, args) = parser.parse_args()
    if opts.config:
        load_conf(opts.config)
    logging.getLogger().setLevel(logging.DEBUG)
    conf = get_conf()
    pid_file = opts.pid_file
    if pid_file is None:
        pid_file = conf.get("WATCHDOG_PID_FILE",
                "/var/run/beaker-lab-controller/beaker-watchdog.pid")
    # HubProxy will try to log some stuff, even though we
    # haven't configured our logging handlers yet. So we send logs to stderr
    # temporarily here, and configure it again below.
    log_to_stream(sys.stderr, level=logging.WARNING)
    try:
        watchdog = Watchdog(conf=conf)
    except Exception as ex:
        # `except X as e` works on Python 2.6+ and 3.x alike.
        sys.stderr.write("Error starting beaker-watchdog: %s\n" % ex)
        sys.exit(1)
def setup_package():
    """Package-level test fixture: load config, enable debug logging and
    create all database tables."""
    assert os.path.exists(_config_file), 'Config file %s must exist' % _config_file
    load_config(configfile=_config_file)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    get_engine()
    metadata.create_all()
def main():
    """Entry point for the beaker-proxy lab controller daemon."""
    parser = OptionParser()
    parser.add_option("-c", "--config", help="Full path to config file to use")
    parser.add_option("-f", "--foreground", default=False, action="store_true",
            help="run in foreground (do not spawn a daemon)")
    parser.add_option("-p", "--pid-file", help="specify a pid file")
    (opts, args) = parser.parse_args()
    if opts.config:
        load_conf(opts.config)
    conf = get_conf()
    logging.getLogger().setLevel(logging.DEBUG)
    pid_file = opts.pid_file
    if pid_file is None:
        pid_file = conf.get("PROXY_PID_FILE",
                "/var/run/beaker-lab-controller/beaker-proxy.pid")
    # HubProxy will try to log some stuff, even though we
    # haven't configured our logging handlers yet. So we send logs to stderr
    # temporarily here, and configure it again below.
    log_to_stream(sys.stderr, level=logging.WARNING)
    try:
        proxy = Proxy(conf=conf)
    except Exception as ex:
        # `except X as e` works on Python 2.6+ and 3.x alike.
        sys.stderr.write("Error initializing Proxy: %s\n" % ex)
        sys.exit(1)
def main():
    """Initialize the Beaker database with an (optional) admin account."""
    parser = get_parser()
    opts, args = parser.parse_args()
    load_config(opts.configfile)
    log_to_stream(sys.stderr)
    init_db(user_name=opts.user_name, password=opts.password,
            user_display_name=opts.display_name,
            user_email_address=opts.email_address)
def main():
    """Update Beaker's product table from the configured XML file."""
    parser = get_parser()
    opts, args = parser.parse_args()
    configfile = opts.configfile
    xml_file = opts.productfile
    load_config_or_exit(configfile)
    log_to_stream(sys.stderr)
    update_products(xml_file)
def main():
    """Update Beaker's product table from the configured XML file."""
    parser = get_parser()
    opts, args = parser.parse_args()
    configfile = opts.configfile
    xml_file = opts.productfile
    load_config_or_exit(configfile)
    log_to_stream(sys.stderr)
    update_products(xml_file)
def setup_package():
    """Integration-test package fixture.

    Loads config, populates the model with baseline tasks/distro data
    (unless BEAKER_SKIP_INIT_DB is set), prepares slapd and the in-process
    TurboGears server, and optionally spawns gunicorn + slapd subprocesses.
    """
    assert os.path.exists(CONFIG_FILE), 'Config file %s must exist' % CONFIG_FILE
    load_config(configfile=CONFIG_FILE)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    from bkr.inttest import data_setup
    if 'BEAKER_SKIP_INIT_DB' not in os.environ:
        data_setup.setup_model()
        with session.begin():
            data_setup.create_labcontroller()  # always need a labcontroller
            data_setup.create_task(name=u'/distribution/install',
                    requires=u'make gcc nfs-utils wget procmail redhat-lsb ntp '
                             u'@development-tools @development-libs @development '
                             u'@desktop-platform-devel @server-platform-devel '
                             u'libxml2-python expect pyOpenSSL'.split())
            data_setup.create_task(name=u'/distribution/reservesys',
                    requires=u'emacs vim-enhanced unifdef sendmail'.split())
            data_setup.create_task(name=u'/distribution/utils/dummy')
            data_setup.create_task(name=u'/distribution/inventory')
            data_setup.create_distro()
    if not os.path.exists(turbogears.config.get('basepath.rpms')):
        os.mkdir(turbogears.config.get('basepath.rpms'))
    setup_slapd()
    turbogears.testutil.make_app(Root)
    turbogears.testutil.start_server()
    global processes
    processes = []
    if 'BEAKER_SERVER_BASE_URL' not in os.environ:
        # need to start the server ourselves
        # Usual pkg_resources ugliness is needed to ensure gunicorn doesn't
        # import pkg_resources before we get a chance to specify our
        # requirements in bkr.server.wsgi
        processes.extend([
            Process('gunicorn', args=[sys.executable, '-c',
                    '__requires__ = ["CherryPy < 3.0"]; import pkg_resources; '
                    'from gunicorn.app.wsgiapp import run; run()',
                    '--bind', ':%s' % turbogears.config.get('server.socket_port'),
                    '--workers', '8', '--access-logfile', '-', '--preload',
                    'bkr.server.wsgi:application'],
                    listen_port=turbogears.config.get('server.socket_port')),
        ])
    processes.extend([
        Process('slapd', args=['slapd', '-d0', '-F' + slapd_config_dir,
                '-hldap://127.0.0.1:3899/'],
                listen_port=3899, stop_signal=signal.SIGINT),
    ])
    try:
        for process in processes:
            process.start()
    except:
        # Roll back any processes we managed to start, then re-raise.
        for process in processes:
            process.stop()
        raise
def main():
    """Generate a Beaker guest image and (by default) upload it to Glance.

    Credentials come from --os-* options or the matching OS_* environment
    variables; with --no-upload the image is only generated locally.
    """
    parser = OptionParser(description=__description__, version=__version__)
    parser.add_option('-c', '--config-file')
    parser.add_option('--debug', action='store_true',
            help='Show detailed information about image creation')
    parser.add_option('--no-upload', dest='upload', action='store_false',
            help='Skip uploading to Glance, leave image temp file on disk')
    parser.add_option('--os-username', help='OpenStack username')
    parser.add_option('--os-password', help='OpenStack password')
    parser.add_option('--os-tenant-name', help='OpenStack tenant name')
    parser.set_defaults(debug=False, upload=True)
    options, args = parser.parse_args()
    load_config_or_exit(options.config_file)
    log_to_stream(sys.stderr,
            level=logging.DEBUG if options.debug else logging.WARNING)
    if options.upload:
        # Get a Glance client. This seems more difficult than it should be...
        username = options.os_username or os.environ.get('OS_USERNAME')
        if not username:
            parser.error('Specify username with --os-username or env[OS_USERNAME]')
        password = options.os_password or os.environ.get('OS_PASSWORD')
        if not password:
            parser.error('Specify password with --os-password or env[OS_PASSWORD]')
        tenant_name = options.os_tenant_name or os.environ.get('OS_TENANT_NAME')
        if not tenant_name:
            parser.error('Specify tenant with --os-tenant-name or env[OS_TENANT_NAME]')
        auth_url = config.get('openstack.identity_api_url')
        if not auth_url:
            parser.error('OpenStack Identity API URL is not set in the configuration')
        log.debug('Authenticating to Keystone')
        keystone = keystoneclient.v2_0.client.Client(username=username,
                password=password, tenant_name=tenant_name, auth_url=auth_url)
        log.debug('Looking up Glance URL in service catalog')
        glance_url = keystone.service_catalog.url_for(service_type='image',
                endpoint_type='publicURL')
        log.debug('Using Glance URL %s', glance_url)
        glance = glanceclient.Client('1', endpoint=glance_url,
                token=keystone.auth_token)
        # Generate and upload the image.
        with session.begin():
            upload_image(glance)
    else:
        # Parenthesized form behaves identically on Python 2 and 3.
        print(generate_image().name)
def main():
    """Regenerate harness repos under the given base URL/path and exit
    with the status returned by update_repos()."""
    parser = get_parser()
    opts, args = parser.parse_args()
    configfile = opts.configfile
    baseurl = opts.baseurl
    # The base URL is always a directory with a trailing slash
    if not baseurl.endswith('/'):
        baseurl += '/'
    load_config_or_exit(configfile)
    log_to_stream(sys.stderr,
            level=logging.DEBUG if opts.debug else logging.WARNING)
    if opts.basepath:
        basepath = opts.basepath
    else:
        basepath = get("basepath.harness")
    sys.exit(update_repos(baseurl=baseurl, basepath=basepath))
def main():
    """Send nag mails for reservations older than the given threshold.

    With --testing, performs a dry run (nothing is actually sent).
    """
    parser = get_parser()
    opts, args = parser.parse_args()
    threshold = opts.threshold
    # Option values arrive as bytes on Python 2; decode using the
    # terminal encoding, falling back to UTF-8.
    reservation_type = opts.reservation_type.decode(sys.stdin.encoding or 'utf8')
    testing = opts.testing
    configfile = opts.configfile
    load_config(configfile)
    log_to_stream(sys.stderr)
    interface.start(config)
    get_engine()
    if testing:
        # Parenthesized form behaves identically on Python 2 and 3.
        print('Dry run only, nothing will be sent\n')
    identify_nags(threshold, reservation_type, testing)
def main():
    """Generate a Beaker guest image and (by default) upload it to Glance.

    Credentials come from --os-* options or the matching OS_* environment
    variables; with --no-upload the image is only generated locally.
    """
    parser = OptionParser(description=__description__, version=__version__)
    parser.add_option("-c", "--config-file")
    parser.add_option("--debug", action="store_true",
            help="Show detailed information about image creation")
    parser.add_option("--no-upload", dest="upload", action="store_false",
            help="Skip uploading to Glance, leave image temp file on disk")
    parser.add_option("--os-username", help="OpenStack username")
    parser.add_option("--os-password", help="OpenStack password")
    parser.add_option("--os-tenant-name", help="OpenStack tenant name")
    parser.set_defaults(debug=False, upload=True)
    options, args = parser.parse_args()
    load_config_or_exit(options.config_file)
    log_to_stream(sys.stderr,
            level=logging.DEBUG if options.debug else logging.WARNING)
    if options.upload:
        # Get a Glance client. This seems more difficult than it should be...
        username = options.os_username or os.environ.get("OS_USERNAME")
        if not username:
            parser.error("Specify username with --os-username or env[OS_USERNAME]")
        password = options.os_password or os.environ.get("OS_PASSWORD")
        if not password:
            parser.error("Specify password with --os-password or env[OS_PASSWORD]")
        tenant_name = options.os_tenant_name or os.environ.get("OS_TENANT_NAME")
        if not tenant_name:
            parser.error("Specify tenant with --os-tenant-name or env[OS_TENANT_NAME]")
        auth_url = config.get("openstack.identity_api_url")
        if not auth_url:
            parser.error("OpenStack Identity API URL is not set in the configuration")
        log.debug("Authenticating to Keystone")
        keystone = keystoneclient.v2_0.client.Client(username=username,
                password=password, tenant_name=tenant_name, auth_url=auth_url)
        log.debug("Looking up Glance URL in service catalog")
        glance_url = keystone.service_catalog.url_for(service_type="image",
                endpoint_type="publicURL")
        log.debug("Using Glance URL %s", glance_url)
        glance = glanceclient.Client("1", endpoint=glance_url,
                token=keystone.auth_token)
        # Generate and upload the image.
        with session.begin():
            upload_image(glance)
    else:
        # Parenthesized form behaves identically on Python 2 and 3.
        print(generate_image().name)
def main():
    """Initialize, upgrade or downgrade the Beaker database schema."""
    parser = get_parser()
    opts, args = parser.parse_args()
    load_config_or_exit(opts.configfile)
    log_to_stream(sys.stderr,
            level=logging.DEBUG if opts.debug else logging.WARNING)
    from turbogears.database import metadata, bind_metadata
    bind_metadata()
    if opts.downgrade:
        downgrade_db(metadata, opts.downgrade)
    else:
        # if database is empty then initialize it
        if not metadata.bind.table_names():
            init_db(metadata, user_name=opts.user_name, password=opts.password,
                    user_display_name=opts.display_name,
                    user_email_address=opts.email_address)
        else:
            # upgrade to the latest DB version
            upgrade_db(metadata)
def main():
    """Entry point for the bkr client: dispatch to a subcommand and map
    Kerberos credential errors to friendly messages / exit status 1."""
    log_to_stream(sys.stderr, level=logging.WARNING)
    global conf
    command_container = BeakerCommandContainer(conf=conf)
    formatter = IndentedHelpFormatter(max_help_position=60, width=120)
    parser = BeakerOptionParser(version=__version__, conflict_handler='resolve',
            command_container=command_container, default_command="help",
            formatter=formatter)
    # This is parser.run(), but with more sensible error handling
    cmd, cmd_opts, cmd_args = parser.parse_args()
    if not cmd_opts.hub and not conf:
        sys.stderr.write("Configuration file not found. Please create an /etc/beaker/client.conf "
                "or ~/.beaker_client/config configuration file.\n")
        return 1
    # Need to deal with the possibility that requests is not importable...
    try:
        import requests
        maybe_http_error = (requests.HTTPError,)
    except ImportError:
        maybe_http_error = ()
    try:
        return cmd.run(*cmd_args, **cmd_opts.__dict__)
    except krbV.Krb5Error as e:
        # `except X as e` works on Python 2.6+ and 3.x alike.
        if e.args[0] == krbV.KRB5KRB_AP_ERR_TKT_EXPIRED:
            sys.stderr.write('Kerberos ticket expired (run kinit to obtain a new ticket)\n')
            return 1
        elif e.args[0] == krbV.KRB5_FCC_NOFILE:
            sys.stderr.write('No Kerberos credential cache found (run kinit to create one)\n')
            return 1
        else:
            raise
def main():
    """Refresh Beaker's LDAP-derived data; a silent no-op when LDAP is
    disabled (this command runs from cron)."""
    parser = optparse.OptionParser('usage: %prog [options]',
            description=__description__, version=__version__)
    parser.add_option('-c', '--config', metavar='FILENAME',
            help='Read configuration from FILENAME')
    parser.add_option('--debug', action='store_true',
            help='Print debugging messages to stderr')
    parser.set_defaults(debug=False)
    options, args = parser.parse_args()
    load_config(options.config)
    log_to_stream(sys.stderr,
            level=logging.DEBUG if options.debug else logging.WARNING)
    # beaker-server ships a cron job to run this command, so exit silently
    # and quickly if LDAP is not actually enabled.
    if not config.get('identity.ldap.enabled', False):
        return 0
    refresh_ldap()
    return 0
def main():
    """Initialize, upgrade or downgrade the Beaker database schema, then
    populate baseline data (admin user) on the init/upgrade path."""
    parser = get_parser()
    opts, args = parser.parse_args()
    load_config_or_exit(opts.configfile)
    log_to_stream(sys.stderr,
            level=logging.DEBUG if opts.debug else logging.WARNING)
    from turbogears.database import metadata, bind_metadata
    bind_metadata()
    if opts.downgrade:
        downgrade_db(metadata, opts.downgrade)
    else:
        # if database is empty then initialize it
        if not metadata.bind.table_names():
            if not opts.user_name:
                parser.error('Pass --user to create an admin user')
            init_db(metadata)
        else:
            # upgrade to the latest DB version
            upgrade_db(metadata)
        # NOTE(review): populate_db placed after the init/upgrade branch so
        # it runs in both cases (indentation was lost in the source) — confirm.
        populate_db(opts.user_name, opts.password, opts.display_name,
                opts.email_address)
def main():
    """Entry point for the bkr client: dispatch to a subcommand and map
    Kerberos credential errors to friendly messages / exit status 1."""
    log_to_stream(sys.stderr, level=logging.WARNING)
    global conf
    command_container = BeakerCommandContainer(conf=conf)
    formatter = IndentedHelpFormatter(max_help_position=60, width=120)
    parser = BeakerOptionParser(version=__version__, conflict_handler='resolve',
            command_container=command_container, default_command="help",
            formatter=formatter)
    # This is parser.run(), but with more sensible error handling
    cmd, cmd_opts, cmd_args = parser.parse_args()
    if not cmd_opts.hub and not conf:
        sys.stderr.write("Configuration file not found. Please create an /etc/beaker/client.conf "
                "or ~/.beaker_client/config configuration file.\n")
        return 1
    # Need to deal with the possibility that requests is not importable...
    try:
        import requests
        maybe_http_error = (requests.HTTPError,)
    except ImportError:
        maybe_http_error = ()
    try:
        return cmd.run(*cmd_args, **cmd_opts.__dict__)
    except krbV.Krb5Error as e:
        # `except X as e` works on Python 2.6+ and 3.x alike.
        if e.args[0] == krbV.KRB5KRB_AP_ERR_TKT_EXPIRED:
            sys.stderr.write('Kerberos ticket expired (run kinit to obtain a new ticket)\n')
            return 1
        elif e.args[0] == krbV.KRB5_FCC_NOFILE:
            sys.stderr.write('No Kerberos credential cache found (run kinit to create one)\n')
            return 1
        else:
            raise
def main(*args):
    """Send Beaker usage-reminder mails to every user with expiring
    reservations, in-demand open loans or delayed jobs.

    :param args: optional argument list forwarded to the option parser
    """
    parser = get_parser()
    (options, args) = parser.parse_args(*args)
    load_config_or_exit(options.configfile)
    log_to_stream(sys.stderr)
    interface.start(config)
    reservation_expiry = options.reservation_expiry
    reservation_length = options.reservation_length
    waiting_recipe_age = options.waiting_recipe_age
    delayed_job_age = options.delayed_job_age
    testing = options.testing
    if testing:
        # Parenthesized form behaves identically on Python 2 and 3.
        print('Dry run only, nothing will be sent\n')
    for user in User.query:
        beaker_usage = BeakerUsage(user, reservation_expiry, reservation_length,
                waiting_recipe_age, delayed_job_age)
        expiring_reservations = beaker_usage.expiring_reservations()
        open_in_demand_systems = beaker_usage.open_in_demand_systems()
        delayed_jobs = beaker_usage.delayed_jobs()
        # Only mail users who actually have something to be reminded about.
        if (expiring_reservations or open_in_demand_systems or delayed_jobs):
            data = {
                'user_name': user.user_name,
                'current_date': datetime.utcnow().strftime("%Y-%m-%d"),
                'beaker_fqdn': absolute_url('/'),
                'reservation_expiry': reservation_expiry,
                'reservation_length': reservation_length,
                'waiting_recipe_age': waiting_recipe_age,
                'delayed_job_age': delayed_job_age,
                'expiring_reservations': expiring_reservations,
                'open_reservations': open_in_demand_systems,
                'delayed_jobs': delayed_jobs
            }
            mail.send_usage_reminder(user, data, testing)
    return
def main():
    """Refresh Beaker's LDAP-derived data; a silent no-op when LDAP is
    disabled (this command runs from cron)."""
    parser = optparse.OptionParser('usage: %prog [options]',
            description=__description__, version=__version__)
    parser.add_option('-c', '--config', metavar='FILENAME',
            help='Read configuration from FILENAME')
    parser.add_option('--debug', action='store_true',
            help='Print debugging messages to stderr')
    parser.set_defaults(debug=False)
    options, args = parser.parse_args()
    load_config_or_exit(options.config)
    log_to_stream(sys.stderr,
            level=logging.DEBUG if options.debug else logging.WARNING)
    # beaker-server ships a cron job to run this command, so exit silently
    # and quickly if LDAP is not actually enabled.
    if not config.get('identity.ldap.enabled', False):
        return 0
    refresh_ldap()
    return 0
def main():
    """Entry point for beaker-init.

    Runs ``doit`` in the foreground, or daemonizes with a PID lock file
    when --background is given. Returns a process exit status.
    """
    parser = get_parser()
    opts, args = parser.parse_args()
    load_config_or_exit(opts.configfile)
    if opts.check and opts.background:
        parser.error('--check --background makes no sense, how will you know the result?')
    if not opts.background:
        log_to_stream(sys.stderr, level=logging.DEBUG if opts.debug else logging.WARNING)
        return doit(opts)
    else:
        pidlockfile = PIDLockFile(PIDFILE)
        existing_pid = pidlockfile.read_pid()
        if existing_pid:
            if process_is_alive(existing_pid):
                sys.stderr.write('Another beaker-init process is running (pid %s)\n'
                        % existing_pid)
                return 1
            else:
                # Stale lock left by a dead process: clean it up and continue.
                sys.stderr.write('Pid file %s exists but pid %s is dead, '
                        'removing the pid file\n' % (PIDFILE, existing_pid))
                pidlockfile.break_lock()
        with daemon.DaemonContext(pidfile=pidlockfile, detach_process=True):
            log_to_syslog('beaker-init')
            return doit(opts)
from turbogears.database import session import cherrypy import cherrypy._cpwsgi from cherrypy.filters.basefilter import BaseFilter from flask import Flask from bkr.common import __version__ from bkr.server import identity, assets from bkr.server.app import app log = logging.getLogger(__name__) # Load config. from bkr.log import log_to_stream from bkr.server.util import load_config load_config() log_to_stream(sys.stderr, level=logging.DEBUG) # Keep the code before the imports, otherwise we'll end up with function names # not marked as executed (see: Coverage.py FAQ) if config.get('coverage', False): import coverage import atexit log.debug('Starting coverage analysis') cov = coverage.coverage(data_suffix=True, cover_pylib=False, timid=True, omit=['*.kid']) cov.start() def save_coverage():
def setup_package():
    """Integration-test package fixture.

    Loads config, populates the model with baseline tasks/distro data
    (unless BEAKER_SKIP_INIT_DB is set), prepares slapd and the in-process
    TurboGears server, and optionally spawns gunicorn + slapd subprocesses.
    """
    assert os.path.exists(CONFIG_FILE), 'Config file %s must exist' % CONFIG_FILE
    load_config(configfile=CONFIG_FILE)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    from bkr.inttest import data_setup
    if 'BEAKER_SKIP_INIT_DB' not in os.environ:
        data_setup.setup_model()
        with session.begin():
            data_setup.create_labcontroller()  # always need a labcontroller
            data_setup.create_task(name=u'/distribution/install',
                    requires=u'make gcc nfs-utils wget procmail redhat-lsb ntp '
                             u'@development-tools @development-libs @development '
                             u'@desktop-platform-devel @server-platform-devel '
                             u'libxml2-python expect pyOpenSSL'.split())
            data_setup.create_task(name=u'/distribution/reservesys',
                    requires=u'emacs vim-enhanced unifdef sendmail'.split())
            data_setup.create_task(name=u'/distribution/utils/dummy')
            data_setup.create_task(name=u'/distribution/inventory')
            data_setup.create_distro()
    if not os.path.exists(turbogears.config.get('basepath.rpms')):
        os.mkdir(turbogears.config.get('basepath.rpms'))
    setup_slapd()
    turbogears.testutil.make_app(Root)
    turbogears.testutil.start_server()
    global processes
    processes = []
    if 'BEAKER_SERVER_BASE_URL' not in os.environ:
        # need to start the server ourselves
        # Usual pkg_resources ugliness is needed to ensure gunicorn doesn't
        # import pkg_resources before we get a chance to specify our
        # requirements in bkr.server.wsgi
        processes.extend([
            Process('gunicorn', args=[sys.executable, '-c',
                    '__requires__ = ["CherryPy < 3.0"]; import pkg_resources; '
                    'from gunicorn.app.wsgiapp import run; run()',
                    '--bind', ':%s' % turbogears.config.get('server.socket_port'),
                    '--workers', '8', '--access-logfile', '-', '--preload',
                    'bkr.server.wsgi:application'],
                    listen_port=turbogears.config.get('server.socket_port')),
        ])
    processes.extend([
        Process('slapd', args=['slapd', '-d0', '-F' + slapd_config_dir,
                '-hldap://127.0.0.1:3899/'],
                listen_port=3899, stop_signal=signal.SIGINT),
    ])
    try:
        for process in processes:
            process.start()
    except:
        # Roll back any processes we managed to start, then re-raise.
        for process in processes:
            process.stop()
        raise
def setup_package():
    """Integration-test package fixture (idempotent variant).

    Loads config, fills in the bare-minimum model data only where missing,
    cleans up leftover task RPMs, prepares slapd / mail capture / optional
    OpenStack, starts the in-process server, and optionally spawns
    gunicorn + slapd subprocesses.
    """
    assert os.path.exists(CONFIG_FILE), 'Config file %s must exist' % CONFIG_FILE
    load_config(configfile=CONFIG_FILE)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    from bkr.inttest import data_setup
    if 'BEAKER_SKIP_INIT_DB' not in os.environ:
        data_setup.setup_model()
        with session.begin():
            # Fill in the bare minimum data which Beaker assumes will always be present.
            # Note that this can be called multiple times (for example, the
            # beaker-server-redhat add-on package reuses this setup function).
            if not LabController.query.count():
                data_setup.create_labcontroller()
            if not Task.query.count():
                data_setup.create_task(name=u'/distribution/install',
                        requires=u'make gcc nfs-utils wget procmail redhat-lsb ntp '
                                 u'@development-tools @development-libs @development '
                                 u'@desktop-platform-devel @server-platform-devel '
                                 u'libxml2-python expect pyOpenSSL'.split())
                data_setup.create_task(name=u'/distribution/check-install')
                data_setup.create_task(name=u'/distribution/reservesys',
                        requires=u'emacs vim-enhanced unifdef sendmail'.split())
                data_setup.create_task(name=u'/distribution/utils/dummy')
                data_setup.create_task(name=u'/distribution/inventory')
            if not Distro.query.count():
                # The 'BlueShoeLinux5-5' string appears in many tests, because it's
                # the distro name used in complete-job.xml.
                data_setup.create_distro_tree(osmajor=u'BlueShoeLinux5',
                        distro_name=u'BlueShoeLinux5-5')
    if os.path.exists(turbogears.config.get('basepath.rpms')):
        # Remove any task RPMs left behind by previous test runs
        for entry in os.listdir(turbogears.config.get('basepath.rpms')):
            shutil.rmtree(os.path.join(turbogears.config.get('basepath.rpms'), entry),
                    ignore_errors=True)
    else:
        os.mkdir(turbogears.config.get('basepath.rpms'))
    setup_slapd()
    mail_capture_thread.start()
    if turbogears.config.get('openstack.identity_api_url'):
        setup_openstack()
    turbogears.testutil.make_app(Root)
    turbogears.testutil.start_server()
    global processes
    processes = []
    if 'BEAKER_SERVER_BASE_URL' not in os.environ:
        # need to start the server ourselves
        # Usual pkg_resources ugliness is needed to ensure gunicorn doesn't
        # import pkg_resources before we get a chance to specify our
        # requirements in bkr.server.wsgi
        processes.extend([
            Process('gunicorn', args=[sys.executable, '-c',
                    '__requires__ = ["CherryPy < 3.0"]; import pkg_resources; '
                    'from gunicorn.app.wsgiapp import run; run()',
                    '--bind', ':%s' % turbogears.config.get('server.socket_port'),
                    '--workers', '8', '--access-logfile', '-', '--preload',
                    'bkr.server.wsgi:application'],
                    listen_port=turbogears.config.get('server.socket_port')),
        ])
    processes.extend([
        Process('slapd', args=['slapd', '-d0', '-F' + slapd_config_dir,
                '-hldap://127.0.0.1:3899/'],
                listen_port=3899, stop_signal=signal.SIGINT),
    ])
    try:
        for process in processes:
            process.start()
    except:
        # Roll back any processes we managed to start, then re-raise.
        for process in processes:
            process.stop()
        raise
def main():
    """Entry point for the bkr client (GSSAPI variant).

    Dispatches to a subcommand and maps Kerberos, XML-RPC, HTTP, template
    and configuration errors to friendly messages / exit status 1; EPIPE
    on stdout is treated as a broken shell pipeline (exit 128+SIGPIPE).
    """
    log_to_stream(sys.stderr, level=logging.WARNING)
    command_container = BeakerCommandContainer(conf=conf)
    formatter = IndentedHelpFormatter(max_help_position=60, width=120)
    parser = BeakerOptionParser(version=__version__, conflict_handler='resolve',
            command_container=command_container, default_command="help",
            formatter=formatter)
    # This is parser.run(), but with more sensible error handling
    cmd, cmd_opts, cmd_args = parser.parse_args()
    if not cmd_opts.hub and not conf:
        sys.stderr.write("Configuration file not found. Please create an /etc/beaker/client.conf "
                "or ~/.beaker_client/config configuration file.\n")
        return 1
    # Need to deal with the possibility that requests is not importable...
    try:
        import requests
        maybe_http_error = (requests.HTTPError,)
    except ImportError:
        maybe_http_error = ()
    # Well-known Kerberos minor status codes (see krb5 error tables).
    krb5_fcc_nofile = 2529639107  # No credentials cache found
    krb5krb_ap_err_tkt_expired = 2529638944  # Ticket expired
    kg_empty_ccache = 39756044  # Credential cache is empty
    try:
        return cmd.run(*cmd_args, **cmd_opts.__dict__)
    except gssapi.raw.GSSError as e:
        if e.min_code == krb5krb_ap_err_tkt_expired:  # pylint: disable=no-member
            sys.stderr.write('Kerberos ticket expired (run kinit to obtain a new ticket)\n')
            return 1
        elif e.min_code == krb5_fcc_nofile:  # pylint: disable=no-member
            sys.stderr.write('No Kerberos credential cache file found (run kinit to create one)\n')
            return 1
        elif e.min_code == kg_empty_ccache:  # pylint: disable=no-member
            sys.stderr.write('Kerberos credential cache is empty (run kinit to create one)\n')
            return 1
        else:
            raise
    except Fault as e:
        sys.stderr.write('XML-RPC fault: %s\n' % e.faultString)
        return 1
    except maybe_http_error as e:
        warn_on_version_mismatch(e.response)
        sys.stderr.write('HTTP error: %s\n' % e)
        content_type, _ = cgi.parse_header(e.response.headers.get('Content-Type', ''))
        if content_type == 'text/plain':
            sys.stderr.write(e.response.content.decode('utf-8').rstrip('\n') + '\n')
        return 1
    except BeakerJobTemplateError as e:
        sys.stderr.write('%s\n' % e)
        return 1
    except BeakerClientConfigurationError as e:
        sys.stderr.write('%s\n' % e)
        return 1
    except IOError as e:
        if e.errno == errno.EPIPE:
            # Let's assume it was EPIPE writing to stdout, because we were in
            # a shell pipeline and the other side exited early.
            return 128 + signal.SIGPIPE
        sys.stderr.write('%s\n' % e)
        return 1
    finally:
        # If stdout is a closed pipe (as in the EPIPE case above) we will get
        # a nasty error on shutdown from sys.excepthook, since stdout will fail
        # to flush when it is being closed. Let's just suppress that here.
        try:
            sys.stdout.close()
        except IOError as e:
            if e.errno == errno.EPIPE:
                pass
            else:
                raise
from turbogears import config from turbogears.database import session import cherrypy import cherrypy._cpwsgi from cherrypy.filters.basefilter import BaseFilter from flask import Flask from bkr.server import identity, assets from bkr.server.app import app log = logging.getLogger(__name__) # Load config. from bkr.log import log_to_stream from bkr.server.util import load_config load_config() log_to_stream(sys.stderr, level=logging.DEBUG) application = app # Register all routes. import bkr.server.activity import bkr.server.user import bkr.server.group import bkr.server.systems import bkr.server.reserve_workflow import bkr.server.pools import bkr.server.jobs import bkr.server.recipes @app.before_first_request def init():
def setup_package():
    """Package-level test fixture: load config, enable debug logging and
    create all database tables."""
    assert os.path.exists(_config_file), "Config file %s must exist" % _config_file
    load_config(configfile=_config_file)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    get_engine()
    metadata.create_all()
def main():
    """Generate a Beaker guest image and (by default) upload it to Glance
    using Keystone v3 authentication.

    Credentials come from --os-* options or the matching OS_* environment
    variables; --os-tenant-name is a hidden backwards-compat alias for
    --os-project-name. With --no-upload the image is only generated locally.
    """
    parser = OptionParser(description=__description__, version=__version__)
    parser.add_option('-c', '--config-file')
    parser.add_option('--debug', action='store_true',
            help='Show detailed information about image creation')
    parser.add_option('--no-upload', dest='upload', action='store_false',
            help='Skip uploading to Glance, leave image temp file on disk')
    parser.add_option('--os-username', help='OpenStack username')
    parser.add_option('--os-password', help='OpenStack password')
    parser.add_option('--os-tenant-name', help=SUPPRESS_HELP)
    parser.add_option('--os-project-name', help='OpenStack project name')
    parser.add_option('--os-project-domain-name', help='OpenStack project domain name')
    parser.add_option('--os-user-domain-name', help='OpenStack user domain name')
    parser.add_option('--image-visibility', help='OpenStack Image visibility',
            type='choice',
            choices=['public', 'private', 'shared', 'community'],
            default='public')
    parser.set_defaults(debug=False, upload=True)
    options, args = parser.parse_args()
    load_config_or_exit(options.config_file)
    log_to_stream(sys.stderr,
            level=logging.DEBUG if options.debug else logging.WARNING)
    if options.upload:
        if not has_keystoneclient:
            raise RuntimeError('python-keystoneclient is not installed')
        if not has_glanceclient:
            raise RuntimeError('python-glanceclient is not installed')
        # Get a Glance client. This seems more difficult than it should be...
        username = options.os_username or os.environ.get('OS_USERNAME')
        if not username:
            parser.error('Specify username with --os-username or env[OS_USERNAME]')
        password = options.os_password or os.environ.get('OS_PASSWORD')
        if not password:
            parser.error('Specify password with --os-password or env[OS_PASSWORD]')
        project_name = options.os_project_name or os.environ.get('OS_PROJECT_NAME')
        # for backwards compat
        if not project_name:
            project_name = options.os_tenant_name or os.environ.get('OS_TENANT_NAME')
        if not project_name:
            parser.error('Specify project with --os-project-name or env[OS_PROJECT_NAME]')
        auth_url = config.get('openstack.identity_api_url')
        if not auth_url:
            parser.error('OpenStack Identity API URL is not set in the configuration')
        user_domain_name = options.os_user_domain_name or \
                os.environ.get('OS_USER_DOMAIN_NAME')
        project_domain_name = options.os_project_domain_name or \
                os.environ.get('OS_PROJECT_DOMAIN_NAME')
        log.debug('Authenticating to Keystone')
        keystone = keystoneclient.v3.client.Client(
                username=username,
                password=password,
                project_name=project_name,
                user_domain_name=user_domain_name,
                project_domain_name=project_domain_name,
                auth_url=auth_url)
        log.debug('Looking up Glance URL in service catalog')
        glance_url = keystone.service_catalog.url_for(service_type='image',
                endpoint_type='publicURL')
        log.debug('Using Glance URL %s', glance_url)
        glance = glanceclient.v2.client.Client(glance_url,
                token=keystone.auth_token)
        # Generate and upload the image.
        with session.begin():
            upload_image(glance, visibility=options.image_visibility)
    else:
        # Parenthesized form behaves identically on Python 2 and 3.
        print(generate_image(delete=False).name)