def from_config(cls, **kwargs):
    """Build a client from the ``sprout`` section of the env conf.

    Host/port come from the conf (with localhost:8000 fallbacks); auth is
    taken from the SPROUT_USER/SPROUT_PASSWORD environment variables,
    falling back to the ``sprout`` credentials entry.  ``auth`` is ``None``
    unless both a user and a password were resolved.
    """
    sprout_conf = env.get("sprout", {})
    host = sprout_conf.get("hostname", "localhost")
    port = sprout_conf.get("port", 8000)
    sprout_creds = credentials.get("sprout", {})
    user = os.environ.get("SPROUT_USER", sprout_creds.get("username"))
    password = os.environ.get("SPROUT_PASSWORD", sprout_creds.get("password"))
    auth = (user, password) if user and password else None
    return cls(host=host, port=port, auth=auth, **kwargs)
def from_config(cls, **kwargs):
    """Build a client from the ``sprout`` section of the env conf.

    Credentials are resolved in priority order: first the SPROUT_USER /
    SPROUT_PASSWORD environment variables, then the credentials entry named
    by the optional ``sprout_user_key`` keyword argument (popped from
    ``kwargs`` so it is not forwarded to the constructor).  ``auth`` is
    ``None`` unless both a user and a password were resolved.
    """
    sprout_conf = env.get("sprout", {})
    host = sprout_conf.get("hostname", "localhost")
    port = sprout_conf.get("port", 8000)
    # pop() with a default replaces the original membership test + pop
    # (one dict lookup instead of two).
    user_key = kwargs.pop('sprout_user_key', None)
    # Resolve the credentials entry once instead of once per field.
    key_creds = credentials.get(user_key, {}) if user_key else {}
    user = os.environ.get("SPROUT_USER") or key_creds.get("username")
    password = os.environ.get("SPROUT_PASSWORD") or key_creds.get("password")
    auth = (user, password) if user and password else None
    return cls(host=host, port=port, auth=auth, **kwargs)
def from_config(cls):
    """Create a Bugzilla client from the ``bugzilla`` section of the env conf.

    Falls back to the public Red Hat Bugzilla XML-RPC endpoint when no URL
    is configured.  Authentication prefers username/password over an API
    key; with neither, the client is built unauthenticated.
    """
    bz_conf = env.get('bugzilla', {})  # default empty so we can call .get() later
    url = bz_conf.get('url')
    if url is None:
        url = 'https://bugzilla.redhat.com/xmlrpc.cgi'
        logger.warning("No Bugzilla URL specified in conf, using default: %s", url)
    cred_key = bz_conf.get("credentials")
    bz_kwargs = dict(
        url=url,
        cookiefile=None,
        tokenfile=None,
        # NOTE(review): this looks one level too deep -- bz_conf already IS
        # the 'bugzilla' section, yet 'credentials' above is read directly
        # from it.  Kept as-is to preserve behavior; confirm against the
        # env conf schema.
        product=bz_conf.get("bugzilla", {}).get("product"),
        config_options=bz_conf)
    if cred_key:
        bz_creds = credentials.get(cred_key, {})
        if bz_creds.get('username'):
            logger.info('Using username/password for Bugzilla authentication')
            bz_kwargs.update(dict(
                user=bz_creds.get("username"),
                password=bz_creds.get("password")
            ))
        elif bz_creds.get('api_key'):
            logger.info('Using api key for Bugzilla authentication')
            bz_kwargs.update(dict(api_key=bz_creds.get('api_key')))
        else:
            logger.error('Credentials key for bugzilla does not have username or api key')
    else:
        # logger.warn is a deprecated alias of logger.warning.
        logger.warning('No credentials found for bugzilla')
    return cls(**bz_kwargs)
def pytest_configure(config):
    """Set up the artifactor client/server for this pytest session.

    Builds a client from the ``artifactor`` env conf section, registers a
    shutdown handler (master process only), spawns the artifactor server and
    waits for its port, wires the client into the log handler, and fires the
    ``setup_merkyl`` hook for the current appliance.
    """
    art_client = get_client(art_config=env.get('artifactor', {}), pytest_config=config)
    # just in case
    if not store.slave_manager:
        # ``diaper`` swallows any exception raised while registering.
        with diaper:
            atexit.register(shutdown, config)
    if art_client:
        config._art_proc = spawn_server(config, art_client)
        # Block (up to 10s) until the artifactor server is listening locally.
        wait_for(net_check, func_args=[art_client.port, '127.0.0.1'],
                 func_kwargs={'force': True}, num_sec=10,
                 message="wait for artifactor to start")
        art_client.ready = True
    else:
        config._art_proc = None
    # Deferred import -- presumably avoids a circular import with
    # cfme.utils.log; TODO confirm.
    from cfme.utils.log import artifactor_handler
    artifactor_handler.artifactor = art_client
    if store.slave_manager:
        artifactor_handler.slaveid = store.slaveid
    config._art_client = art_client
    # BUGFIX: the original called fire_hook unconditionally, which raises
    # AttributeError when no client could be built (the else branch above
    # shows that case is possible).
    if art_client:
        art_client.fire_hook('setup_merkyl', ip=get_or_create_current_appliance().address)
def pytest_configure(config):
    """Wire up the artifactor client/server for this pytest session.

    Does nothing under ``--help``.  Otherwise builds a client from the
    ``artifactor`` env conf section, registers a shutdown handler on the
    master process, starts the artifactor server when a client exists, and
    exposes the client via the log handler and ``config``.
    """
    if config.getoption('--help'):
        return
    client = get_client(art_config=env.get('artifactor', {}), pytest_config=config)
    # just in case
    if not store.slave_manager:
        with diaper:
            atexit.register(shutdown, config)
    if not client:
        config._art_proc = None
    else:
        config._art_proc = spawn_server(config, client)
        wait_for(net_check,
                 func_args=[client.port, '127.0.0.1'],
                 func_kwargs={'force': True},
                 num_sec=10,
                 message="wait for artifactor to start")
        client.ready = True
    from cfme.utils.log import artifactor_handler
    artifactor_handler.artifactor = client
    if store.slave_manager:
        artifactor_handler.slaveid = store.slaveid
    config._art_client = client
def run(port, run_id=None):
    """Stand up an Artifactor server on *port* and start a session."""
    art_config = env.get("artifactor", {})
    art_config["server_port"] = int(port)
    art = Artifactor(None)
    # Fall back to the standard log locations when the conf omits them.
    art_config.setdefault("log_dir", log_path.strpath)
    art_config.setdefault("artifact_dir", log_path.join("artifacts").strpath)
    art.set_config(art_config)
    plugins = (
        (merkyl.Merkyl, "merkyl"),
        (logger.Logger, "logger"),
        (video.Video, "video"),
        (filedump.Filedump, "filedump"),
        (reporter.Reporter, "reporter"),
        (post_result.PostResult, "post-result"),
        (ostriz.Ostriz, "ostriz"),
    )
    for plugin_cls, plugin_name in plugins:
        art.register_plugin(plugin_cls, plugin_name)
    initialize(art)
    for _, plugin_name in plugins:
        art.configure_plugin(plugin_name)
    art.fire_hook("start_session", run_id=run_id)
def run(port, run_id=None):
    """Configure an Artifactor server, register and configure the standard
    plugin set, then kick off a session identified by *run_id*."""
    art_config = env.get('artifactor', {})
    art_config['server_port'] = int(port)
    art = Artifactor(None)
    if 'log_dir' not in art_config:
        art_config['log_dir'] = log_path.strpath
    if 'artifact_dir' not in art_config:
        art_config['artifact_dir'] = log_path.join('artifacts').strpath
    art.set_config(art_config)
    # (name, plugin class) pairs; registration and configuration both walk
    # this table in the same order the original used.
    registry = [
        ("merkyl", merkyl.Merkyl),
        ("logger", logger.Logger),
        ("video", video.Video),
        ("filedump", filedump.Filedump),
        ("reporter", reporter.Reporter),
        ("post-result", post_result.PostResult),
        ("ostriz", ostriz.Ostriz),
    ]
    for name, plugin in registry:
        art.register_plugin(plugin, name)
    initialize(art)
    for name, _ in registry:
        art.configure_plugin(name)
    art.fire_hook('start_session', run_id=run_id)
def pytest_configure(config):
    """Start artifactor for the session (no-op when pytest runs ``--help``).

    Spawns the artifactor server when a client could be built, waits for it
    to answer on localhost, and hangs the client off the logging handler and
    the pytest config object.
    """
    if config.getoption('--help'):
        return
    client = get_client(art_config=env.get('artifactor', {}), pytest_config=config)
    # just in case
    if not store.slave_manager:
        with diaper:
            atexit.register(shutdown, config)
    if client:
        config._art_proc = spawn_server(config, client)
        check_args = [client.port, '127.0.0.1']
        wait_for(net_check, func_args=check_args, func_kwargs=dict(force=True),
                 num_sec=10, message="wait for artifactor to start")
        client.ready = True
    else:
        config._art_proc = None
    from cfme.utils.log import artifactor_handler
    artifactor_handler.artifactor = client
    if store.slave_manager:
        artifactor_handler.slaveid = store.slaveid
    config._art_client = client
def _int_if_possible(token):
    """Return *token* converted to int when it parses, otherwise unchanged."""
    try:
        return int(token)
    except ValueError:
        return token


def main():
    """CLI entry point: call a SproutClient method named on the command line.

    Leading tokens without ``=`` or ``:`` become positional args,
    ``key=value`` tokens become kwargs, and a trailing ``user:password``
    token (or SPROUT_USER/SPROUT_PASSWORD environment variables) supplies
    authentication.  Prints the JSON-encoded result.
    """
    sprout_conf = env.get("sprout", {})
    host = sprout_conf.get("hostname", "localhost")
    port = sprout_conf.get("port", 8000)
    tokens = sys.argv[1:]
    if not tokens:
        raise Exception("You have to specify the method!")
    method = tokens.pop(0)
    args = []
    while tokens and "=" not in tokens[0] and ":" not in tokens[0]:
        args.append(_int_if_possible(tokens.pop(0)))
    kwargs = {}
    while tokens and "=" in tokens[0] and ":" not in tokens[0]:
        param, value = tokens.pop(0).split("=", 1)
        kwargs[param] = _int_if_possible(value)
    additional_kwargs = {}
    if tokens and ":" in tokens[0]:
        additional_kwargs["auth"] = [part.strip() for part in tokens[0].split(":", 1)]
    elif "SPROUT_USER" in os.environ and "SPROUT_PASSWORD" in os.environ:
        additional_kwargs["auth"] = os.environ["SPROUT_USER"], os.environ["SPROUT_PASSWORD"]
    elif "SPROUT_PASSWORD" in os.environ:
        additional_kwargs["auth"] = os.environ["USER"], os.environ["SPROUT_PASSWORD"]
    client = SproutClient(host=host, port=port, **additional_kwargs)
    print(json.dumps(client.call_method(method, *args, **kwargs)))
def composite_uncollect(build, source='jenkins'):
    """Composite build function

    Queries the ostriz endpoint for tests already recorded for *build* and
    returns the decoded JSON.  On any failure it degrades to
    ``{'tests': []}`` so collection can proceed.
    """
    since = env.get('ts', time.time())
    # BUGFIX: ``since`` defaults to time.time(), a float, and urllib.quote()
    # requires a string -- coerce every component before quoting.
    url = "{0}?build={1}&source={2}&since={3}".format(
        conf['ostriz'],
        urllib.quote(str(build)),
        urllib.quote(str(source)),
        urllib.quote(str(since)))
    try:
        resp = requests.get(url, timeout=10)
        return resp.json()
    except Exception as e:
        # Best-effort: report and fall back to "no previously-run tests".
        print(e)
        return {'tests': []}
def composite_uncollect(build, source='jenkins', limit_ts=None):
    """Composite build function"""
    query = {
        "build": build,
        "source": source,
        "since": env.get('ts', time.time()),
    }
    if limit_ts:
        query['limit_ts'] = limit_ts
    try:
        response = session.get(conf['ostriz'], params=query, timeout=10)
        return response.json()
    except Exception as exc:
        print(exc)
        return {'tests': []}
def from_config(cls):
    """Build a Bugzilla client from the ``bugzilla`` env conf section,
    defaulting the URL to the public Red Hat endpoint when unset."""
    bz_conf = env.get('bugzilla', {})  # default empty so we can call .get() later
    url = bz_conf.get('url')
    if url is None:
        url = 'https://bugzilla.redhat.com/xmlrpc.cgi'
        logger.warning("No Bugzilla URL specified in conf, using default: %s", url)
    cred_key = bz_conf.get("bugzilla", {}).get("credentials")
    creds = credentials.get(cred_key, {})
    return cls(
        url=url,
        user=creds.get("username"),
        password=creds.get("password"),
        cookiefile=None,
        tokenfile=None,
        product=bz_conf.get("bugzilla", {}).get("product"))
def from_config(cls):
    """Instantiate from the ``bugzilla`` section of the env conf.

    The raw conf section is passed through as ``config_options`` so the
    instance keeps access to everything that was configured.
    """
    bz_conf = env.get('bugzilla', {})  # default empty so we can call .get() later
    url = bz_conf.get('url')
    if url is None:
        url = 'https://bugzilla.redhat.com/xmlrpc.cgi'
        logger.warning("No Bugzilla URL specified in conf, using default: %s", url)
    nested = bz_conf.get("bugzilla", {})
    cred_key = nested.get("credentials")
    client_kwargs = dict(
        url=url,
        user=credentials.get(cred_key, {}).get("username"),
        password=credentials.get(cred_key, {}).get("password"),
        cookiefile=None,
        tokenfile=None,
        product=nested.get("product"),
        config_options=bz_conf)
    return cls(**client_kwargs)
def main():
    """Invoke a SproutClient method from the command line and print the
    JSON-encoded result.

    Tokens are consumed left to right: positionals first, then key=value
    kwargs, then an optional trailing ``user:password`` auth token (with
    SPROUT_USER/SPROUT_PASSWORD environment variables as fallback).
    """
    host = env.get("sprout", {}).get("hostname", "localhost")
    port = env.get("sprout", {}).get("port", 8000)
    remaining = sys.argv[1:]
    try:
        method = remaining.pop(0)
    except IndexError:
        raise Exception("You have to specify the method!")
    args = []
    # Leading tokens containing neither '=' nor ':' are positional args.
    while remaining:
        if "=" in remaining[0] or ":" in remaining[0]:
            break
        token = remaining.pop(0)
        try:
            token = int(token)
        except ValueError:
            pass
        args.append(token)
    kwargs = {}
    # key=value tokens (without ':') become keyword arguments.
    while remaining:
        if "=" not in remaining[0] or ":" in remaining[0]:
            break
        name, raw = remaining.pop(0).split("=", 1)
        try:
            raw = int(raw)
        except ValueError:
            pass
        kwargs[name] = raw
    extra = {}
    if remaining and ":" in remaining[0]:
        extra["auth"] = [piece.strip() for piece in remaining[0].split(":", 1)]
    elif "SPROUT_USER" in os.environ and "SPROUT_PASSWORD" in os.environ:
        extra["auth"] = os.environ["SPROUT_USER"], os.environ["SPROUT_PASSWORD"]
    elif "SPROUT_PASSWORD" in os.environ:
        extra["auth"] = os.environ["USER"], os.environ["SPROUT_PASSWORD"]
    client = SproutClient(host=host, port=port, **extra)
    print(json.dumps(client.call_method(method, *args, **kwargs)))
def composite_uncollect(build, source='jenkins', limit_ts=None):
    """Composite build function"""
    since = env.get('ts', time.time())
    params = dict(build=build, source=source, since=since)
    if limit_ts:
        params['limit_ts'] = limit_ts
    try:
        return session.get(conf['ostriz'], params=params, timeout=10).json()
    except Exception as e:
        print(e)
        return {'tests': []}
def pytest_configure(config):
    """Populate TEMPLATES with trackerbot's per-provider template lists.

    Returns immediately (leaving TEMPLATES untouched) when running under
    ``--help``, on the parallelizer master, when no trackerbot URL is
    configured, or when any appliance in the env conf is marked ``is_dev``.
    Otherwise, depending on the ``use_template_cache`` option, templates are
    either fetched fresh from trackerbot (and written into the pytest
    cache), or read back from the cache with a lazy trackerbot fallback for
    providers missing there.
    """
    # Any dev appliance in the conf disables template loading entirely.
    is_dev = False
    for appliance in env.get('appliances', []) or []:
        if appliance.get('is_dev', False):
            is_dev = True
    tb_url = trackerbot.conf.get('url')
    if (config.getoption('--help') or store.parallelizer_role == 'master' or
            tb_url is None or is_dev):
        return
    # A further optimization here is to make the calls to trackerbot per provider
    # and perhaps only pull the providers that are needed, however that will need
    # to ensure that the tests that just randomly use providers adhere to the filters
    # which may be too tricky right now.
    count = 0
    if not config.getoption('use_template_cache'):
        # Fresh path: one bulk fetch, then fan the result out per provider
        # and mirror each list into the pytest cache for later runs.
        store.terminalreporter.line("Loading templates from trackerbot...", green=True)
        provider_templates = trackerbot.provider_templates(trackerbot.api())
        for provider in list_provider_keys():
            TEMPLATES[provider] = provider_templates.get(provider, [])
            config.cache.set('miq-trackerbot/{}'.format(provider), TEMPLATES[provider])
            count += len(TEMPLATES[provider])
    else:
        # Cached path: read each provider from the pytest cache; only hit
        # trackerbot (at most once) when some provider is missing there.
        store.terminalreporter.line("Using templates from cache...", green=True)
        provider_templates = None
        for provider in list_provider_keys():
            templates = config.cache.get('miq-trackerbot/{}'.format(provider), None)
            if templates is None:
                store.terminalreporter.line(
                    "Loading templates for {} from source as not in cache".format(provider),
                    green=True)
                if not provider_templates:
                    # Lazy bulk fetch, shared by every subsequent cache miss.
                    provider_templates = trackerbot.provider_templates(trackerbot.api())
                templates = provider_templates.get(provider, [])
                config.cache.set('miq-trackerbot/{}'.format(provider), templates)
            count += len(templates)
            TEMPLATES[provider] = templates
    store.terminalreporter.line(" Loaded {} templates successfully!".format(count), green=True)
def composite_uncollect(build, source='jenkins', limit_ts=None):
    """Composite build function"""
    payload = {'build': build, 'source': source, 'since': env.get('ts', time.time())}
    if limit_ts:
        payload['limit_ts'] = limit_ts
    # 6s connect, 60s read
    timeout = (6, 60)
    try:
        response = session.get(conf['ostriz'], params=payload, timeout=timeout)
        return response.json()
    except Exception:
        logger.exception('Composite Uncollect hit an exception making request')
        return {'tests': []}
('upstream_stable', r'^miq-stable-(?P<release>gapri[-\w]*?)' # release name limit to 5 chars r'-(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})'), ('upstream_euwe', r'^miq-stable-(?P<release>euwe[-\w]*?)' r'-(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})'), ('upstream_fine', r'^miq-stable-(?P<release>fine[-\w]*?)' r'-(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})'), ('downstream-nightly', r'^cfme-nightly-(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})'), # new format ('downstream-nightly', r'^cfme-nightly-\d*-(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})'), ) generic_matchers = ( ('sprout', r'^s_tpl'), ('sprout', r'^sprout_template'), ('rhevm-internal', r'^auto-tmp'), ) conf = env.get('trackerbot', {}) _active_streams = None TemplateInfo = namedtuple('TemplateInfo', ['group_name', 'datestamp', 'stream']) def cmdline_parser(): """Get a parser with basic trackerbot configuration params already set up It will use the following keys from the env conf if they're available:: # with example values trackerbot: url: http://hostname/api/ username: username apikey: 0123456789abcdef
def smtp_test(request, appliance):
    """Fixture, which prepares the appliance for e-mail capturing tests

    Returns: :py:class:`util.smtp_collector_client.SMTPCollectorClient` instance.
    """
    logger.info("Preparing start for e-mail collector")
    ports = env.get("mail_collector", {}).get("ports", {})
    # Port precedence: env conf -> environment variable -> random free port.
    mail_server_port = ports.get("smtp", False) or os.getenv(
        'SMTP', False) or random_port()
    mail_query_port = ports.get("json", False) or os.getenv(
        'JSON', False) or random_port()
    my_ip = my_ip_address()
    logger.info("Mind that it needs ports %s and %s open", mail_query_port, mail_server_port)
    # Point the appliance's outgoing mail at this machine, unauthenticated.
    appliance.server.settings.update_smtp_server({
        'host': my_ip,
        'port': str(mail_server_port),
        'auth': "none"
    })
    server_filename = scripts_path.join('smtp_collector.py').strpath
    server_command = server_filename + " --smtp-port {} --query-port {}".format(
        mail_server_port, mail_query_port)
    logger.info("Starting mail collector %s", server_command)
    # Defined before Popen so _finalize is a safe no-op if startup fails;
    # the closure reads the rebound `collector` at finalization time.
    collector = None

    def _finalize():
        # Escalating shutdown: SIGINT, then SIGTERM after 2s, then SIGKILL
        # after a further 5s if the process still has not exited.
        if collector is None:
            return
        logger.info("Sending KeyboardInterrupt to collector")
        try:
            collector.send_signal(signal.SIGINT)
        except OSError as e:
            # TODO: Better logging.
            logger.exception(e)
            logger.error("Something happened to the e-mail collector!")
            return
        time.sleep(2)
        if collector.poll() is None:
            logger.info("Sending SIGTERM to collector")
            collector.send_signal(signal.SIGTERM)
            time.sleep(5)
            if collector.poll() is None:
                logger.info("Sending SIGKILL to collector")
                collector.send_signal(signal.SIGKILL)
        collector.wait()
        logger.info("Collector finished")
        logger.info("Cleaning up smtp setup in CFME")
    collector = subprocess.Popen(server_command, shell=True)
    request.addfinalizer(_finalize)
    logger.info("Collector pid %s", collector.pid)
    logger.info("Waiting for collector to become alive.")
    time.sleep(3)
    assert collector.poll() is None, "Collector has died. \nSomething must be blocking selected ports"
    logger.info("Collector alive")
    # Verify both chosen ports are actually reachable from outside.
    query_port_open = net_check_remote(mail_query_port, my_ip, force=True)
    server_port_open = net_check_remote(mail_server_port, my_ip, force=True)
    assert query_port_open and server_port_open,\
        'Ports {} and {} on the machine executing the tests are closed.\n'\
        'The ports are randomly chosen -> turn firewall off.'\
        .format(mail_query_port, mail_server_port)
    client = SMTPCollectorClient(my_ip, mail_query_port)
    client.set_test_name(request.node.name)
    client.clear_database()
    return client
import argparse import json import time from collections import defaultdict import requests import slumber from six.moves.urllib_parse import urlparse, parse_qs from cfme.utils.conf import env from cfme.utils.providers import providers_data session = requests.Session() conf = env.get('trackerbot', {}) _active_streams = None def cmdline_parser(): """Get a parser with basic trackerbot configuration params already set up It will use the following keys from the env conf if they're available:: # with example values trackerbot: url: http://hostname/api/ username: username apikey: 0123456789abcdef """ # Set up defaults from env, if they're set, otherwise require them on the commandline def_url = {'default': None, 'nargs': '?'} if 'url' in conf else {}
def smtp_test(request, appliance):
    """Fixture, which prepares the appliance for e-mail capturing tests

    Returns: :py:class:`util.smtp_collector_client.SMTPCollectorClient` instance.
    """
    logger.info("Preparing start for e-mail collector")
    ports = env.get("mail_collector", {}).get("ports", {})
    # Each port: env conf value, else environment variable, else random.
    mail_server_port = ports.get("smtp", False) or os.getenv('SMTP', False) or random_port()
    mail_query_port = ports.get("json", False) or os.getenv('JSON', False) or random_port()
    my_ip = my_ip_address()
    logger.info("Mind that it needs ports %s and %s open", mail_query_port, mail_server_port)
    # Redirect the appliance's SMTP traffic to this machine (no auth).
    appliance.server.settings.update_smtp_server({
        'host': my_ip,
        'port': str(mail_server_port),
        'auth': "none"
    })
    server_filename = scripts_path.join('smtp_collector.py').strpath
    server_command = server_filename + " --smtp-port {} --query-port {}".format(
        mail_server_port, mail_query_port
    )
    logger.info("Starting mail collector %s", server_command)
    # `collector` starts as None so the finalizer is a no-op when the
    # subprocess never launched; the closure sees the later rebinding.
    collector = None

    def _finalize():
        # Shut the collector down gently first, escalating SIGINT ->
        # SIGTERM (after 2s) -> SIGKILL (after 5 more seconds).
        if collector is None:
            return
        logger.info("Sending KeyboardInterrupt to collector")
        try:
            collector.send_signal(signal.SIGINT)
        except OSError as e:
            # TODO: Better logging.
            logger.exception(e)
            logger.error("Something happened to the e-mail collector!")
            return
        time.sleep(2)
        if collector.poll() is None:
            logger.info("Sending SIGTERM to collector")
            collector.send_signal(signal.SIGTERM)
            time.sleep(5)
            if collector.poll() is None:
                logger.info("Sending SIGKILL to collector")
                collector.send_signal(signal.SIGKILL)
        collector.wait()
        logger.info("Collector finished")
        logger.info("Cleaning up smtp setup in CFME")
    collector = subprocess.Popen(server_command, shell=True)
    request.addfinalizer(_finalize)
    logger.info("Collector pid %s", collector.pid)
    logger.info("Waiting for collector to become alive.")
    time.sleep(3)
    assert collector.poll() is None, "Collector has died. \nSomething must be blocking selected ports"
    logger.info("Collector alive")
    # Confirm both ports are reachable from a remote point of view.
    query_port_open = net_check_remote(mail_query_port, my_ip, force=True)
    server_port_open = net_check_remote(mail_server_port, my_ip, force=True)
    assert query_port_open and server_port_open,\
        'Ports {} and {} on the machine executing the tests are closed.\n'\
        'The ports are randomly chosen -> turn firewall off.'\
        .format(mail_query_port, mail_server_port)
    client = SMTPCollectorClient(
        my_ip,
        mail_query_port
    )
    client.set_test_name(request.node.name)
    client.clear_database()
    return client
logging: video: enabled: True dir: video display: ":99" quality: 10 """ import os import subprocess from signal import SIGINT from cfme.utils.conf import env # from utils.log import logger vid_options = env.get('logging', {}).get('video') def process_running(pid): """Check whether specified process is running""" try: os.kill(pid, 0) except OSError as e: if e.errno == 3: return False else: raise else: return True
video: enabled: True dir: video display: ":99" quality: 10 """ import os.path import re import pytest from cfme.utils.conf import env from cfme.utils.path import log_path from cfme.utils.video import Recorder vid_options = env.get('logging', {}).get('video') recorder = None def get_path_and_file_name(node): """Extract filename and location from the node. Args: node: py.test collection node to examine. Returns: 2-tuple `(path, filename)` """ vid_name = re.sub(r"[^a-zA-Z0-9_.\-\[\]]", "_", node.name) # Limit only sane characters vid_name = re.sub(r"[/]", "_", vid_name) # To be sure this guy doesn't get in vid_name = re.sub(r"__+", "_", vid_name) # Squash _'s to limit the length return node.parent.name, vid_name