def from_config(cls, **kwargs):
    """Build a client from the ``sprout`` section of the config.

    Host and port come from ``env``; credentials prefer the SPROUT_USER /
    SPROUT_PASSWORD environment variables and fall back to the ``credentials``
    config. ``auth`` is passed only when both pieces are available.
    """
    sprout_cfg = env.get("sprout", {})
    sprout_creds = credentials.get("sprout", {})
    user = os.environ.get("SPROUT_USER", sprout_creds.get("username", None))
    password = os.environ.get("SPROUT_PASSWORD", sprout_creds.get("password", None))
    # Only authenticate when both the user and the password are known.
    auth = (user, password) if user and password else None
    return cls(
        host=sprout_cfg.get("hostname", "localhost"),
        port=sprout_cfg.get("port", 8000),
        auth=auth,
        **kwargs)
def from_config(cls, **kwargs):
    """Instantiate from the ``sprout`` env config section.

    Env vars SPROUT_USER / SPROUT_PASSWORD override the ``credentials``
    config; ``auth`` stays ``None`` unless both values are present.
    """
    cfg = env.get("sprout", {})
    creds = credentials.get("sprout", {})
    username = os.environ.get("SPROUT_USER", creds.get("username"))
    secret = os.environ.get("SPROUT_PASSWORD", creds.get("password"))
    if username and secret:
        auth = (username, secret)
    else:
        auth = None
    return cls(host=cfg.get("hostname", "localhost"),
               port=cfg.get("port", 8000),
               auth=auth,
               **kwargs)
def run(port, run_id=None):
    """Start a standalone artifactor server on ``port``.

    Fills in default log/artifact directories, registers and configures the
    full plugin set, then fires the ``start_session`` hook.
    """
    art_config = env.get('artifactor', {})
    art_config['server_port'] = int(port)
    art = Artifactor(None)
    # Default the directories only when the config does not provide them.
    if 'log_dir' not in art_config:
        art_config['log_dir'] = log_path.strpath
    if 'artifact_dir' not in art_config:
        art_config['artifact_dir'] = log_path.join('artifacts').strpath
    art.set_config(art_config)
    # Registration and configuration order mirrors the original call sequence.
    plugins = (
        (merkyl.Merkyl, "merkyl"),
        (logger.Logger, "logger"),
        (video.Video, "video"),
        (filedump.Filedump, "filedump"),
        (reporter.Reporter, "reporter"),
        (post_result.PostResult, "post-result"),
        (ostriz.Ostriz, "ostriz"),
    )
    for plugin_cls, plugin_name in plugins:
        art.register_plugin(plugin_cls, plugin_name)
    initialize(art)
    for _, plugin_name in plugins:
        art.configure_plugin(plugin_name)
    art.fire_hook('start_session', run_id=run_id)
def run(port, run_id=None):
    """Start an artifactor server (with soft-assert support) on ``port``."""
    cfg = env.get('artifactor', {})
    cfg['server_port'] = int(port)
    # setdefault == "only set when missing", same as the explicit membership test.
    cfg.setdefault('log_dir', log_path.join('artifacts').strpath)
    art = Artifactor(None)
    art.set_config(cfg)
    for plugin_cls, name in (
            (merkyl.Merkyl, "merkyl"),
            (logger.Logger, "logger"),
            (video.Video, "video"),
            (filedump.Filedump, "filedump"),
            (softassert.SoftAssert, "softassert"),
            (reporter.Reporter, "reporter"),
            (post_result.PostResult, "post-result")):
        art.register_plugin(plugin_cls, name)
    art.register_hook_callback('filedump', 'pre', parse_setup_dir,
                               name="filedump_dir_setup")
    initialize(art)
    for name in ('merkyl', 'logger', 'video', 'filedump', 'softassert',
                 'reporter', 'post-result'):
        art.configure_plugin(name)
    art.fire_hook('start_session', run_id=run_id)
def pytest_configure(config):
    """Wire the artifactor client into the pytest run.

    Spawns the artifactor server (master process only registers the shutdown
    handler), waits for its port to open, and hooks the client into the
    logging machinery before firing ``setup_merkyl``.
    """
    client = get_client(
        art_config=env.get('artifactor', {}), pytest_config=config)
    # just in case
    if not store.slave_manager:
        with diaper:
            atexit.register(shutdown, config)
    if client:
        config._art_proc = spawn_server(config, client)
        # Block until the server is actually accepting connections.
        wait_for(net_check,
                 func_args=[client.port, '127.0.0.1'],
                 func_kwargs={'force': True},
                 num_sec=10,
                 message="wait for artifactor to start")
        client.ready = True
    else:
        config._art_proc = None
    from utils.log import artifactor_handler
    artifactor_handler.artifactor = client
    if store.slave_manager:
        artifactor_handler.slaveid = store.slaveid
    config._art_client = client
    client.fire_hook('setup_merkyl', ip=get_or_create_current_appliance().address)
def run(port, run_id=None):
    """Start an artifactor server on ``port``; merkyl gets the appliance IP."""
    art_config = env.get('artifactor', {})
    art_config['server_port'] = int(port)
    art_config.setdefault('log_dir', log_path.join('artifacts').strpath)
    art = Artifactor(None)
    art.set_config(art_config)
    registrations = (
        (merkyl.Merkyl, "merkyl"),
        (logger.Logger, "logger"),
        (video.Video, "video"),
        (filedump.Filedump, "filedump"),
        (reporter.Reporter, "reporter"),
    )
    for plugin_cls, name in registrations:
        art.register_plugin(plugin_cls, name)
    art.register_hook_callback('filedump', 'pre', parse_setup_dir,
                               name="filedump_dir_setup")
    initialize(art)
    # merkyl needs to know where the appliance lives; the rest take defaults.
    ip = urlparse(env['base_url']).hostname
    art.configure_plugin('merkyl', ip=ip)
    for name in ('logger', 'video', 'filedump', 'reporter'):
        art.configure_plugin(name)
    art.fire_hook('start_session', run_id=run_id)
def smtp_test(request):
    """Fixture, which prepares the appliance for e-mail capturing tests

    Starts the local ``smtp_collector.py`` helper on two ports (SMTP + JSON
    query), points the appliance's SMTP settings at this machine, and
    registers a finalizer that shuts the collector down with escalating
    signals (SIGINT -> SIGTERM -> SIGKILL).

    Returns: :py:class:`util.smtp_collector_client.SMTPCollectorClient` instance.
    """
    logger.info("Preparing start for e-mail collector")
    ports = env.get("mail_collector", {}).get("ports", {})
    mail_server_port = ports.get("smtp", None) or random_port()
    mail_query_port = ports.get("json", None) or random_port()
    my_ip = my_ip_address()
    logger.info("Mind that it needs ports %s and %s open", mail_query_port, mail_server_port)
    smtp_conf = configuration.SMTPSettings(host=my_ip, port=mail_server_port, auth="none")
    smtp_conf.update()
    server_filename = scripts_path.join("smtp_collector.py").strpath
    server_command = server_filename + " --smtp-port {} --query-port {}".format(
        mail_server_port, mail_query_port)
    logger.info("Starting mail collector %s", server_command)
    collector = None

    def _finalize():
        # Closure reads `collector` late, so it sees the Popen object below.
        if collector is None:
            return
        logger.info("Sending KeyboardInterrupt to collector")
        try:
            collector.send_signal(signal.SIGINT)
        except OSError as e:
            # TODO: Better logging.
            logger.exception(e)
            logger.error("Something happened to the e-mail collector!")
            return
        time.sleep(2)
        if collector.poll() is None:
            logger.info("Sending SIGTERM to collector")
            collector.send_signal(signal.SIGTERM)
            time.sleep(5)
        if collector.poll() is None:
            logger.info("Sending SIGKILL to collector")
            collector.send_signal(signal.SIGKILL)
        collector.wait()
        logger.info("Collector finished")

    collector = subprocess.Popen(server_command, shell=True)
    request.addfinalizer(_finalize)
    logger.info("Collector pid %s", collector.pid)
    logger.info("Waiting for collector to become alive.")
    time.sleep(3)
    # FIX: the assert message was a string literal broken across source lines
    # (a syntax error as extracted); rejoined into a single literal.
    assert collector.poll() is None, (
        "Collector has died. Something must be blocking selected ports")
    logger.info("Collector alive")
    query_port_open = net_check_remote(mail_query_port, my_ip, force=True)
    server_port_open = net_check_remote(mail_server_port, my_ip, force=True)
    assert query_port_open and server_port_open, (
        "Ports {} and {} on the machine executing the tests are closed.\n"
        "The ports are randomly chosen -> turn firewall off.".format(mail_query_port,
                                                                     mail_server_port)
    )
    client = SMTPCollectorClient(my_ip, mail_query_port)
    client.set_test_name(request.node.name)
    client.clear_database()
    return client
def composite_uncollect(build):
    """Composite build function"""
    since = env.get('ts', time.time())
    url = "{}?build={}&source=jenkins&since={}".format(
        conf['ostriz'], urllib.quote(build), since)
    # Best effort: any failure (network, bad JSON, ...) yields an empty result.
    try:
        return requests.get(url, timeout=10).json()
    except Exception as e:
        print(e)
        return {'tests': []}
def main():
    """CLI entry point: call an arbitrary SproutClient method.

    argv shape: ``METHOD [positional...] [key=value...] [user:password]``.
    Numeric-looking tokens are coerced to int. Auth falls back to the
    SPROUT_USER/SPROUT_PASSWORD (or USER/SPROUT_PASSWORD) env vars.
    """
    host = env.get("sprout", {}).get("hostname", "localhost")
    port = env.get("sprout", {}).get("port", 8000)
    command_args = sys.argv[1:]
    try:
        method = command_args.pop(0)
    except IndexError:
        raise Exception("You have to specify the method!")
    args = []
    # Positional args: everything before the first key=value or user:password token.
    while command_args and "=" not in command_args[0] and ":" not in command_args[0]:
        value = command_args.pop(0)
        try:
            value = int(value)
        except ValueError:
            pass
        args.append(value)
    kwargs = {}
    while command_args and "=" in command_args[0] and ":" not in command_args[0]:
        param, value = command_args.pop(0).split("=", 1)
        try:
            value = int(value)
        except ValueError:
            pass
        kwargs[param] = value
    additional_kwargs = {}
    if command_args and ":" in command_args[0]:
        additional_kwargs["auth"] = [x.strip() for x in command_args[0].split(":", 1)]
    elif "SPROUT_USER" in os.environ and "SPROUT_PASSWORD" in os.environ:
        additional_kwargs["auth"] = os.environ["SPROUT_USER"], os.environ["SPROUT_PASSWORD"]
    elif "SPROUT_PASSWORD" in os.environ:
        additional_kwargs["auth"] = os.environ["USER"], os.environ["SPROUT_PASSWORD"]
    client = SproutClient(host=host, port=port, **additional_kwargs)
    # FIX: was a Python 2 `print` statement; the call form prints identically
    # for a single argument on both Python 2 and 3, matching the sibling CLIs.
    print(json.dumps(client.call_method(method, *args, **kwargs)))
def main():
    """CLI entry point: call an arbitrary SproutClient method.

    argv shape: ``METHOD [positional...] [key=value...] [user:password]``;
    numeric-looking tokens become ints, auth falls back to env vars.
    """
    sprout_cfg = env.get("sprout", {})
    host = sprout_cfg.get("hostname", "localhost")
    port = sprout_cfg.get("port", 8000)
    tokens = sys.argv[1:]
    if not tokens:
        raise Exception("You have to specify the method!")
    method = tokens.pop(0)

    def coerce(text):
        # CLI tokens that look like ints become ints; everything else stays str.
        try:
            return int(text)
        except ValueError:
            return text

    args = []
    while tokens and "=" not in tokens[0] and ":" not in tokens[0]:
        args.append(coerce(tokens.pop(0)))
    kwargs = {}
    while tokens and "=" in tokens[0] and ":" not in tokens[0]:
        key, _, raw = tokens.pop(0).partition("=")
        kwargs[key] = coerce(raw)
    extra = {}
    if tokens and ":" in tokens[0]:
        extra["auth"] = [part.strip() for part in tokens[0].split(":", 1)]
    elif "SPROUT_USER" in os.environ and "SPROUT_PASSWORD" in os.environ:
        extra["auth"] = os.environ["SPROUT_USER"], os.environ["SPROUT_PASSWORD"]
    elif "SPROUT_PASSWORD" in os.environ:
        extra["auth"] = os.environ["USER"], os.environ["SPROUT_PASSWORD"]
    client = SproutClient(host=host, port=port, **extra)
    print(json.dumps(client.call_method(method, *args, **kwargs)))
def main():
    """CLI: install VDDK on a target appliance."""
    parser = argparse.ArgumentParser(
        epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("--address",
                        default=env.get("base_url", None),
                        help="hostname or ip address of target appliance")
    parser.add_argument("--vddk_url", help="url to download vddk pkg")
    parser.add_argument("--reboot",
                        action="store_true",
                        help="reboot after installation " + "(required for proper operation)")
    parser.add_argument("--force",
                        action="store_true",
                        help="force installation if version detected")
    args = parser.parse_args()
    # Only the host part of the URL identifies the appliance.
    address = urlparse(args.address).netloc
    appliance = IPAppliance(address=address)
    appliance.install_vddk(reboot=args.reboot,
                           force=args.force,
                           vddk_url=args.vddk_url,
                           log_callback=log)
def _smtp_test_session(request):
    """Fixture, which prepares the appliance for e-mail capturing tests

    Starts the local smtp_collector helper and yields a client for it;
    after the test the collector is interrupted and reaped.

    Returns: :py:class:`util.smtp_collector_client.SMTPCollectorClient` instance.
    """
    logger.info("Preparing start for e-mail collector")
    ports = env.get("mail_collector", {}).get("ports", {})
    server_port = ports.get("smtp", None) or random_port()
    query_port = ports.get("json", None) or random_port()
    ip = my_ip_address()
    logger.info("Mind that it needs ports %d and %d open" % (query_port, server_port))
    smtp_conf = configuration.SMTPSettings(host=ip, port=server_port, auth="none",)
    smtp_conf.update()
    script = scripts_path.join('smtp_collector.py').strpath
    command = script + " --smtp-port %d --query-port %d" % (server_port, query_port)
    logger.info("Starting mail collector %s" % command)
    collector = subprocess.Popen(command, shell=True)
    logger.info("Collector pid %d" % collector.pid)
    logger.info("Waiting for collector to become alive.")
    time.sleep(3)
    assert collector.poll() is None, \
        "Collector has died. Something must be blocking selected ports"
    logger.info("Collector alive")
    # Both checks run unconditionally so the message can report either port.
    query_open = net_check_remote(query_port, ip, force=True)
    server_open = net_check_remote(server_port, ip, force=True)
    assert query_open and server_open,\
        'Ports %d and %d on the machine executing the tests are closed.\n'\
        'The ports are randomly chosen -> turn firewall off.'\
        % (query_port, server_port)
    client = SMTPCollectorClient(ip, query_port)
    yield client
    logger.info("Sending KeyboardInterrupt to collector")
    collector.send_signal(signal.SIGINT)
    collector.wait()
    logger.info("Collector finished")
def main():
    """CLI: install VDDK on a target appliance."""
    parser = argparse.ArgumentParser(
        epilog=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        '--address',
        default=env.get("base_url", None),
        help='hostname or ip address of target appliance')
    parser.add_argument('--vddk_url', help='url to download vddk pkg')
    parser.add_argument(
        '--reboot',
        action="store_true",
        help='reboot after installation ' + '(required for proper operation)')
    parser.add_argument(
        '--force',
        action="store_true",
        help='force installation if version detected')
    args = parser.parse_args()
    # Strip scheme/path: only the network location names the appliance.
    target = urlparse(args.address).netloc
    IPAppliance(address=target).install_vddk(
        reboot=args.reboot,
        force=args.force,
        vddk_url=args.vddk_url,
        log_callback=log)
def main():
    """CLI: install the NetApp SDK on a target appliance."""
    parser = argparse.ArgumentParser(
        epilog=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        '--address',
        default=parse_if_not_none(env.get("base_url", None)),
        help='hostname or ip address of target appliance')
    parser.add_argument(
        '--sdk_url',
        default=cfme_data.get("basic_info", {}).get("netapp_sdk_url", None),
        help='url to download sdk pkg')
    parser.add_argument(
        '--restart',
        action="store_true",
        help='restart evmserverd after installation ' + '(required for proper operation)')
    args = parser.parse_args()
    # Echo the effective settings before doing anything slow.
    print('Address: {}'.format(args.address))
    print('SDK URL: {}'.format(args.sdk_url))
    print('Restart: {}'.format(args.restart))
    appliance = IPAppliance(address=args.address)
    appliance.install_netapp_sdk(
        sdk_url=args.sdk_url, reboot=args.restart, log_callback=log)
def pytest_configure(config):
    """Hook the artifactor client into this pytest session.

    Spawns the server (only the master registers the shutdown handler),
    waits for its port, and wires the client into the log handler.
    """
    art_client = get_client(art_config=env.get('artifactor', {}),
                            pytest_config=config)
    # just in case
    if not store.slave_manager:
        with diaper:
            atexit.register(shutdown, config)
    if not art_client:
        config._art_proc = None
    else:
        config._art_proc = spawn_server(config, art_client)
        wait_for(
            net_check,
            func_args=[art_client.port, '127.0.0.1'],
            func_kwargs={'force': True},
            num_sec=10,
            message="wait for artifactor to start")
        art_client.ready = True
    from utils.log import artifactor_handler
    artifactor_handler.artifactor = art_client
    config._art_client = art_client
    art_client.fire_hook('setup_merkyl', ip=urlparse(env['base_url']).netloc)
def main():
    """CLI: install the NetApp SDK on a target appliance."""
    argparser = argparse.ArgumentParser(
        epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    argparser.add_argument('--address',
                           default=parse_if_not_none(env.get("base_url", None)),
                           help='hostname or ip address of target appliance')
    argparser.add_argument('--sdk_url',
                           default=cfme_data.get("basic_info", {}).get("netapp_sdk_url", None),
                           help='url to download sdk pkg')
    argparser.add_argument('--restart',
                           action="store_true",
                           help='restart evmserverd after installation ' +
                                '(required for proper operation)')
    opts = argparser.parse_args()
    # Report the effective settings up front.
    print('Address: {}'.format(opts.address))
    print('SDK URL: {}'.format(opts.sdk_url))
    print('Restart: {}'.format(opts.restart))
    IPAppliance(address=opts.address).install_netapp_sdk(
        sdk_url=opts.sdk_url, reboot=opts.restart, log_callback=log)
# NOTE(review): the three defs below take `self` and appear to be methods of
# an artifactor DummyClient class whose header is outside this chunk —
# confirm their indentation against the full file before applying.
def fire_hook(self, *args, **kwargs):
    # No-op: the dummy client silently accepts hook firings.
    return

def terminate(self):
    # No-op: nothing to terminate on the dummy client.
    return

def task_status(self):
    # No-op: no task status to report on the dummy client.
    return

def __nonzero__(self):
    # DummyClient is always False, so it's easy to see if we have an artiactor client
    return False

# Module-level artifactor client setup: a real ArtifactorClient when the env
# config has an `artifactor` section, otherwise the always-falsy dummy.
proc = None
art_config = env.get('artifactor', {})

if art_config:
    # If server_port isn't set, pick a random port
    if 'server_port' not in art_config:
        port = random_port()
        art_config['server_port'] = port
    art_client = ArtifactorClient(art_config['server_address'],
                                  art_config['server_port'])
else:
    art_client = DummyClient()

# Slave id propagated from the env config (empty string on the master).
SLAVEID = ""
if env.get('slaveid', None):
    SLAVEID = env['slaveid']
import slumber

from utils.conf import env

# regexen to match templates to streams and pull out the date
# stream names must be slugified (alphanumeric, dashes, underscores only)
# regex must include month and day, may include year
# If year is unset, will be the most recent month/day (not in the future)
stream_matchers = (
    # FIX: made this a raw string like its siblings — the non-raw form relied
    # on '\d' surviving as a literal backslash escape (deprecated in py3).
    ('upstream', r'^miq-nightly-(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})'),
    ('downstream-52z', r'^cfme-52.*-(?P<month>\d{2})(?P<day>\d{2})'),
    ('downstream-53z', r'^cfme-53.*-(?P<month>\d{2})(?P<day>\d{2})'),
    # Nightly builds are currently in the 5.3.z stream
    ('downstream-53z', r'^cfme-nightly-(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})'),
)

trackerbot_conf = env.get('trackerbot', {})


def cmdline_parser():
    """Get a parser with basic trackerbot configuration params already set up

    It will use the following keys from the env conf if they're available::

        # with example values
        trackerbot:
            url: http://hostname/api/
            username: username
            apikey: 0123456789abcdef

    """
    # Set up defaults from env, if they're set, otherwise require them on the commandline
(get_stream('5.5'), r'^cfme-55.*-(?P<month>\d{2})(?P<day>\d{2})'), (get_stream('5.6'), r'^cfme-56.*-(?P<month>\d{2})(?P<day>\d{2})'), # Nightly builds have potentially multiple version streams bound to them so we # cannot use get_stream() ('downstream-nightly', r'^cfme-nightly-(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})'), # new format ('downstream-nightly', r'^cfme-nightly-\d*-(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})'), ) generic_matchers = ( ('sprout', r'^s_tpl'), ('sprout', r'^sprout_template'), ('rhevm-internal', r'^auto-tmp'), ) conf = env.get('trackerbot', {}) _active_streams = None TemplateInfo = namedtuple('TemplateInfo', ['group_name', 'datestamp', 'stream']) def cmdline_parser(): """Get a parser with basic trackerbot configuration params already set up It will use the following keys from the env conf if they're available:: # with example values trackerbot: url: http://hostname/api/ username: username
def smtp_test(request):
    """Fixture, which prepares the appliance for e-mail capturing tests

    Starts the local ``smtp_collector.py`` helper on two ports (SMTP + JSON
    query), points the appliance's SMTP settings at this machine, and
    registers a finalizer that shuts the collector down with escalating
    signals (SIGINT -> SIGTERM -> SIGKILL).

    Returns: :py:class:`util.smtp_collector_client.SMTPCollectorClient` instance.
    """
    logger.info("Preparing start for e-mail collector")
    ports = env.get("mail_collector", {}).get("ports", {})
    mail_server_port = ports.get("smtp", None) or random_port()
    mail_query_port = ports.get("json", None) or random_port()
    my_ip = my_ip_address()
    logger.info("Mind that it needs ports {} and {} open".format(
        mail_query_port, mail_server_port))
    smtp_conf = configuration.SMTPSettings(
        host=my_ip,
        port=mail_server_port,
        auth="none",
    )
    smtp_conf.update()
    server_filename = scripts_path.join('smtp_collector.py').strpath
    server_command = server_filename + " --smtp-port {} --query-port {}".format(
        mail_server_port, mail_query_port)
    logger.info("Starting mail collector {}".format(server_command))
    collector = None

    def _finalize():
        # Closure reads `collector` late, so it sees the Popen object below.
        if collector is None:
            return
        logger.info("Sending KeyboardInterrupt to collector")
        try:
            collector.send_signal(signal.SIGINT)
        except OSError as e:
            # TODO: Better logging.
            logger.exception(e)
            logger.error("Something happened to the e-mail collector!")
            return
        time.sleep(2)
        if collector.poll() is None:
            logger.info("Sending SIGTERM to collector")
            collector.send_signal(signal.SIGTERM)
            time.sleep(5)
        if collector.poll() is None:
            logger.info("Sending SIGKILL to collector")
            collector.send_signal(signal.SIGKILL)
        collector.wait()
        logger.info("Collector finished")

    collector = subprocess.Popen(server_command, shell=True)
    request.addfinalizer(_finalize)
    logger.info("Collector pid {}".format(collector.pid))
    logger.info("Waiting for collector to become alive.")
    time.sleep(3)
    # FIX: the assert message was a string literal broken across source lines
    # (a syntax error as extracted); rejoined into a single literal.
    assert collector.poll() is None, \
        "Collector has died. Something must be blocking selected ports"
    logger.info("Collector alive")
    query_port_open = net_check_remote(mail_query_port, my_ip, force=True)
    server_port_open = net_check_remote(mail_server_port, my_ip, force=True)
    assert query_port_open and server_port_open,\
        'Ports {} and {} on the machine executing the tests are closed.\n'\
        'The ports are randomly chosen -> turn firewall off.'\
        .format(mail_query_port, mail_server_port)
    client = SMTPCollectorClient(my_ip, mail_query_port)
    client.set_test_name(request.node.name)
    client.clear_database()
    return client
def main():
    # NOTE(review): Python 2 script (print statements) — keep on a py2 interpreter.
    """Download, extract and install the NetApp SDK on an appliance over SSH,
    persist LD_LIBRARY_PATH for evmserverd, enable storage in the vmdb YAML,
    and optionally reboot the appliance."""
    parser = argparse.ArgumentParser(epilog=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        '--address', help='hostname or ip address of target appliance',
        default=parse_if_not_none(env.get("base_url", None)))
    parser.add_argument(
        '--sdk_url', help='url to download sdk pkg',
        default=cfme_data.get("basic_info", {}).get("netapp_sdk_url", None))
    parser.add_argument('--restart', help='restart evmserverd after installation ' +
        '(required for proper operation)', action="store_true")
    args = parser.parse_args()
    ssh_kwargs = {
        'username': credentials['ssh']['username'],
        'password': credentials['ssh']['password'],
        'hostname': args.address
    }
    # Init SSH client
    client = SSHClient(**ssh_kwargs)
    # start
    filename = args.sdk_url.split('/')[-1]
    foldername = os.path.splitext(filename)[0]
    # download
    print 'Downloading sdk'
    status, out = client.run_command('wget {url} -O {file} > /root/unzip.out 2>&1'.format(
        url=args.sdk_url, file=filename))
    # extract
    print 'Extracting sdk ({})'.format(filename)
    status, out = client.run_command('unzip -o -d /var/www/miq/vmdb/lib/ {}'.format(filename))
    if status != 0:
        print out
        sys.exit(1)
    # install
    print 'Installing sdk ({})'.format(foldername)
    path = "/var/www/miq/vmdb/lib/{}/lib/linux-64".format(foldername)
    # Check if we haven't already added this line
    if client.run_command("grep -F '{}' /etc/default/evm".format(path))[0] != 0:
        status, out = client.run_command(
            'echo "export LD_LIBRARY_PATH=\$LD_LIBRARY_PATH:{}" >> /etc/default/evm'.format(path))
        if status != 0:
            print 'SDK installation failure (rc: {})'.format(out)
            print out
            sys.exit(1)
    else:
        print "Not needed to install, already done"
    print "Running ldconfig"
    client.run_command("ldconfig")
    print "Modifying YAML configuration"
    yaml = get_yaml_config("vmdb")
    yaml["product"]["storage"] = True
    set_yaml_config("vmdb", yaml)
    client.run_command("touch /var/www/miq/vmdb/HAS_NETAPP")  # To mark that we installed netapp
    # service evmserverd restart
    if args.restart:
        print 'Appliance restart'
        status, out = client.run_command('reboot &')
        time.sleep(30)  # To prevent clobbing with appliance shutting down
        print 'evmserverd restarted, the UI should start shortly.'
    else:
        print 'evmserverd must be restarted before netapp sdk can be used'
enabled: True dir: video display: ":99" quality: 10 """ import os import subprocess from signal import SIGINT from utils.conf import env # from utils.log import logger vid_options = env.get('logging', {}).get('video') def process_running(pid): """Check whether specified process is running""" try: os.kill(pid, 0) except OSError as e: if e.errno == 3: return False else: raise else: return True
enabled: True
        dir: video
        display: ":99"
        quality: 10
"""
import os
import os.path
import pytest
import re
from utils.conf import env
from utils.path import log_path
from utils.video import Recorder

# Video recording options from the env config (None when not configured).
vid_options = env.get('logging', {}).get('video')
# Active Recorder instance for the current test, managed elsewhere in this module.
recorder = None


def get_path_and_file_name(node):
    """Extract filename and location from the node.

    Args:
        node: py.test collection node to examine.
    Returns: 2-tuple `(path, filename)`
    """
    # NOTE(review): the rest of this function lies beyond this chunk.
    vid_name = re.sub(r"[^a-zA-Z0-9_.\-\[\]]", "_", node.name)  # Limit only sane characters
    vid_name = re.sub(r"[/]", "_", vid_name)  # To be sure this guy doesn't get in
    vid_name = re.sub(r"__+", "_", vid_name)  # Squash _'s to limit the length
enabled: True dir: video display: ":99" quality: 10 """ import os import subprocess from signal import SIGINT from utils.conf import env from utils.log import logger vid_options = env.get("logging", {}).get("video") def process_running(pid): """Check whether specified process is running""" try: os.kill(pid, 0) except OSError as e: if e.errno == 3: return False else: raise else: return True
def main():
    # NOTE(review): Python 2 script (print statements) — keep on a py2 interpreter.
    """Download, extract and install the NetApp SDK on an appliance over SSH,
    persist LD_LIBRARY_PATH for evmserverd, enable storage in the vmdb YAML,
    and optionally reboot the appliance."""
    parser = argparse.ArgumentParser(
        epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--address',
                        help='hostname or ip address of target appliance',
                        default=parse_if_not_none(env.get("base_url", None)))
    parser.add_argument('--sdk_url',
                        help='url to download sdk pkg',
                        default=cfme_data.get("basic_info", {}).get("netapp_sdk_url", None))
    parser.add_argument('--restart',
                        help='restart evmserverd after installation ' +
                        '(required for proper operation)',
                        action="store_true")
    args = parser.parse_args()
    ssh_kwargs = {
        'username': credentials['ssh']['username'],
        'password': credentials['ssh']['password'],
        'hostname': args.address
    }
    # Init SSH client
    client = SSHClient(**ssh_kwargs)
    # start
    filename = args.sdk_url.split('/')[-1]
    foldername = os.path.splitext(filename)[0]
    # download
    print 'Downloading sdk'
    status, out = client.run_command(
        'wget {url} -O {file} > /root/unzip.out 2>&1'.format(url=args.sdk_url, file=filename))
    # extract
    print 'Extracting sdk ({})'.format(filename)
    status, out = client.run_command(
        'unzip -o -d /var/www/miq/vmdb/lib/ {}'.format(filename))
    if status != 0:
        print out
        sys.exit(1)
    # install
    print 'Installing sdk ({})'.format(foldername)
    path = "/var/www/miq/vmdb/lib/{}/lib/linux-64".format(foldername)
    # Check if we haven't already added this line
    if client.run_command(
            "grep -F '{}' /etc/default/evm".format(path))[0] != 0:
        status, out = client.run_command(
            'echo "export LD_LIBRARY_PATH=\$LD_LIBRARY_PATH:{}" >> /etc/default/evm'
            .format(path))
        if status != 0:
            print 'SDK installation failure (rc: {})'.format(out)
            print out
            sys.exit(1)
    else:
        print "Not needed to install, already done"
    print "Running ldconfig"
    client.run_command("ldconfig")
    print "Modifying YAML configuration"
    yaml = get_yaml_config("vmdb")
    yaml["product"]["storage"] = True
    set_yaml_config("vmdb", yaml)
    client.run_command("touch /var/www/miq/vmdb/HAS_NETAPP"
                       )  # To mark that we installed netapp
    # service evmserverd restart
    if args.restart:
        print 'Appliance restart'
        status, out = client.run_command('reboot &')
        time.sleep(30)  # To prevent clobbing with appliance shutting down
        print 'evmserverd restarted, the UI should start shortly.'
    else:
        print 'evmserverd must be restarted before netapp sdk can be used'
def listener_port(self):
    """Event-listener port: the configured value, or a random free port."""
    configured = env.get("event_listener", {}).get("port", None)
    return configured or random_port()
return def terminate(self): return def task_status(self): return def __nonzero__(self): # DummyClient is always False, so it's easy to see if we have an artiactor client return False proc = None art_config = env.get('artifactor', {}) if art_config: # If server_port isn't set, pick a random port if 'server_port' not in art_config: port = random_port() art_config['server_port'] = port art_client = ArtifactorClient(art_config['server_address'], art_config['server_port']) else: art_client = DummyClient() SLAVEID = "" if env.get('slaveid', None): SLAVEID = env['slaveid']
from collections import OrderedDict

import execnet
import py
import pytest
from _pytest import runner

from fixtures.terminalreporter import reporter
from utils.conf import env, runtime
from utils.log import create_sublogger

_appliance_help = '''specify appliance URLs to use for distributed testing.
this option can be specified more than once, and must be specified at least two times'''

env_base_urls = env.get('parallel_base_urls', [])
if env_base_urls:
    runtime['env']['base_url'] = env_base_urls[0]

# Initialize slaveid to None, indicating this as the master process
# slaves will set this to a unique string when they're initialized
# FIX: was the bare expression `runtime['env']['slaveid']` — a no-op (and a
# KeyError if the key is absent); the comment above shows an assignment was
# intended.
runtime['env']['slaveid'] = None


def pytest_addoption(parser):
    """Register the repeatable --appliance option for distributed runs."""
    group = parser.getgroup("cfme")
    group._addoption('--appliance', dest='appliances', action='append',
        default=env_base_urls, metavar='base_url', help=_appliance_help)

# -------------------------------------------------------------------------
# distributed testing initialization