def disable_external_auth_openldap():
    """Unconfigure external OpenLDAP authentication on the current appliance.

    Resets the auth mode back to the default, removes the sssd/pam/httpd
    configuration files that the OpenLDAP setup created, restarts
    evmserverd, waits for the web UI to return and logs out.
    """
    current_appliance = get_or_create_current_appliance()
    current_appliance.server.authentication.set_auth_mode()
    sssd_conf = '/etc/sssd/sssd.conf'
    httpd_auth = '/etc/pam.d/httpd-auth'
    manageiq_remoteuser = '******'
    manageiq_ext_auth = '/etc/httpd/conf.d/manageiq-external-auth.conf'
    command = 'rm -rf {} && rm -rf {} && rm -rf {} && rm -rf {}'.format(
        sssd_conf, httpd_auth, manageiq_ext_auth, manageiq_remoteuser)
    with current_appliance.ssh_client as ssh:
        assert ssh.run_command(command)
        ssh.run_command('systemctl restart evmserverd')
    # Reuse the appliance object we already resolved instead of calling
    # get_or_create_current_appliance() a second time.
    current_appliance.wait_for_web_ui()
    current_appliance.server.logout()
def disable_external_auth_openldap():
    """Tear down external OpenLDAP authentication on the current appliance."""
    current_appliance = get_or_create_current_appliance()
    current_appliance.server.authentication.set_auth_mode()
    # Configuration artifacts created by the OpenLDAP external-auth setup,
    # in the same removal order as before.
    leftover_files = (
        '/etc/sssd/sssd.conf',
        '/etc/pam.d/httpd-auth',
        '/etc/httpd/conf.d/manageiq-external-auth.conf',
        '******',
    )
    command = ' && '.join('rm -rf {}'.format(path) for path in leftover_files)
    with current_appliance.ssh_client as ssh:
        assert ssh.run_command(command)
        ssh.run_command('systemctl restart evmserverd')
    get_or_create_current_appliance().wait_for_web_ui()
    current_appliance.server.logout()
def simulate(instance=None, message=None, request=None, target_type=None,
             target_object=None, execute_methods=None, attributes_values=None,
             pre_clear=True, appliance=None):
    """Runs the simulation of specified Automate object."""
    appliance = appliance or get_or_create_current_appliance()
    view = navigate_to(appliance.server, 'AutomateSimulation')
    if pre_clear:
        # Reset the form to a known baseline before filling the user values.
        view.avp.clear()
        view.fill({'instance': 'Request',
                   'message': 'create',
                   'request': '',
                   'target_type': '<None>',
                   'execute_methods': True})
    view.fill({'instance': instance,
               'message': message,
               'request': request,
               'target_type': target_type,
               'target_object': target_object,
               'execute_methods': execute_methods,
               'avp': attributes_values})
    view.submit_button.click()
    view.flash.assert_no_error()
    view.flash.assert_message('Automation Simulation has been run')
def main():
    """Parse CLI arguments and install the VDDK package on an appliance."""
    parser = argparse.ArgumentParser(
        epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--address', default=None,
                        help='hostname or ip address of target appliance')
    parser.add_argument('--vddk_url', help='url to download vddk pkg')
    parser.add_argument('--reboot', action="store_true",
                        help='reboot after installation (required for proper operation)')
    parser.add_argument('--force', action="store_true",
                        help='force installation if version detected')
    args = parser.parse_args()
    if args.address:
        appliance = IPAppliance(hostname=urlparse(args.address).netloc)
    else:
        appliance = get_or_create_current_appliance()
    appliance.install_vddk(reboot=args.reboot, force=args.force,
                           vddk_url=args.vddk_url, log_callback=log)
def test_ssui_login(context):
    """Log into the Self-Service UI, verify the landing page, then log out."""
    appliance = get_or_create_current_appliance()
    with appliance.context.use(context):
        appliance.server.login()
        page = appliance.browser.create_view(SSUIBaseLoggedInPage)
        assert page.is_displayed
        page.logout()
def get_template_from_config(template_config_name):
    """
    Convenience function to grab the details for a template from the yamls
    and create the template if it does not exist yet.
    """
    template_config = conf.cfme_data.get('customization_templates', {})[template_config_name]
    script_data = load_data_file(str(project_path.join(template_config['script_file'])),
                                 replacements=template_config['replacements'])
    script_data = script_data.read()
    appliance = get_or_create_current_appliance()
    collection = appliance.collections.customization_templates
    # Build the kwargs once so instantiate() and create() cannot drift apart.
    kwargs = {
        'name': template_config['name'],
        'description': template_config['description'],
        'image_type': template_config['image_type'],
        'script_type': template_config['script_type'],
        'script_data': script_data,
    }
    customization_template = collection.instantiate(**kwargs)
    if not customization_template.exists():
        return collection.create(**kwargs)
    return customization_template
def test_ssui_dashboard(setup_provider, context, order_catalog_item_in_ops_ui):
    """Tests various Primary and aggregate card values displayed on dashboard."""
    appliance = get_or_create_current_appliance()
    with appliance.context.use(context):
        appliance.server.login()
        Dashboard(appliance).total_service()
def pytest_generate_tests(metafunc):
    """Parametrize smartstate-analysis tests with (provider, datastore) pairs."""
    argnames, argvalues, idlist = testgen.providers_by_class(
        metafunc, PROVIDER_TYPES, required_fields=['datastores'])
    argnames += ['datastore']
    appliance = get_or_create_current_appliance()
    # TODO: turn the datastore into a parameterized fixture by type,
    # and discuss semantics for obtaining them by type
    datastore_collection = datastore.DatastoreCollection(appliance)
    new_idlist = []
    new_argvalues = []
    for index, argvalue_tuple in enumerate(argvalues):
        args = dict(zip(argnames, argvalue_tuple))
        provider_datastores = args['provider'].data.get('datastores', {})
        if not provider_datastores:
            continue
        for ds in provider_datastores:
            # Only datastores explicitly flagged for fleecing are collected.
            if not ds.get('test_fleece', False):
                continue
            assert ds.get('type') in DATASTORE_TYPES, (
                'datastore type must be set to [{}] for smartstate analysis tests'
                .format('|'.join(DATASTORE_TYPES)))
            new_argvalues.append(argvalues[index][:] + [
                datastore_collection.instantiate(
                    ds['name'], args['provider'].key, ds['type'])])
            new_idlist.append('{}-{}'.format(args['provider'].key, ds['type']))
    testgen.parametrize(metafunc, argnames, new_argvalues, ids=new_idlist,
                        scope="module")
def pytest_runtest_logreport(report):
    """Track per-phase outcomes; after teardown, log the final result marker."""
    # e.g. test_tracking['test_name']['setup'] = 'passed'
    #      test_tracking['test_name']['call'] = 'skipped'
    #      test_tracking['test_name']['teardown'] = 'failed'
    yield
    test_tracking[_format_nodeid(report.nodeid, False)][report.when] = report.outcome
    if report.when != 'teardown':
        return
    path, lineno, domaininfo = report.location
    test_status = _test_status(_format_nodeid(report.nodeid, False))
    if test_status == "failed":
        appliance = get_or_create_current_appliance()
        try:
            provider_keys = ", ".join(
                [prov.key for prov in appliance.managed_known_providers])
            logger().info("Managed providers: {}".format(provider_keys))
        except KeyError as ex:
            if 'ext_management_systems' in ex.msg:
                logger().warning("Unable to query ext_management_systems table; DB issue")
            else:
                raise
    logger().info(
        log.format_marker('{} result: {}'.format(
            _format_nodeid(report.nodeid), test_status)),
        extra={'source_file': path, 'source_lineno': lineno})
    if report.outcome == "skipped":
        logger().info(log.format_marker(report.longreprtext))
def pytest_runtest_teardown(item, nextitem):
    """Fire the artifactor teardown hooks and ship results to ostriz."""
    name, location = get_test_idents(item)
    app = get_or_create_current_appliance()
    ip = app.address
    fire_art_test_hook(item, 'finish_test', slaveid=store.slaveid,
                       ip=ip, wait_for_task=True)
    fire_art_test_hook(item, 'sanitize', words=words)
    jenkins_data = {
        'build_url': os.environ.get('BUILD_URL'),
        'build_number': os.environ.get('BUILD_NUMBER'),
        'git_commit': os.environ.get('GIT_COMMIT'),
        'job_name': os.environ.get('JOB_NAME'),
    }
    try:
        # Best-effort: a missing or dead browser must not break teardown.
        caps = app.browser.widgetastic.selenium.capabilities
        param_dict = {
            'browserName': caps['browserName'],
            'browserPlatform': caps['platform'],
            'browserVersion': caps['version'],
        }
    except Exception as e:
        logger.error(e)
        param_dict = None
    fire_art_test_hook(item, 'ostriz_send', env_params=param_dict,
                       slaveid=store.slaveid,
                       polarion_ids=extract_polarion_ids(item),
                       jenkins=jenkins_data)
def test_check_compliance_history(request, virtualcenter_provider, vmware_vm,
                                  policy_collection, policy_profile_collection):
    """This test checks if compliance history link in a VM details screen work.

    Steps:
        * Create any VM compliance policy
        * Assign it to a policy profile
        * Assign the policy profile to any VM
        * Perform the compliance check for the VM
        * Go to the VM details screen
        * Click on "History" row in Compliance InfoBox

    Result:
        Compliance history screen with last 10 checks should be opened
    """
    policy = policy_collection.create(
        VMCompliancePolicy,
        "Check compliance history policy {}".format(fauxfactory.gen_alpha()),
        active=True,
        scope="fill_field(VM and Instance : Name, INCLUDES, {})".format(vmware_vm.name))

    @request.addfinalizer
    def _remove_policy():
        if policy.exists:
            policy.delete()

    policy_profile = policy_profile_collection.create(policy.description,
                                                      policies=[policy])

    @request.addfinalizer
    def _remove_profile():
        if policy_profile.exists:
            policy_profile.delete()

    virtualcenter_provider.assign_policy_profiles(policy_profile.description)
    request.addfinalizer(lambda: virtualcenter_provider.unassign_policy_profiles(
        policy_profile.description))
    vmware_vm.check_compliance()
    vmware_vm.open_details(["Compliance", "History"])
    appliance = get_or_create_current_appliance()
    history_screen_title = Text(appliance.browser.widgetastic,
                                "//span[@id='explorer_title_text']").text
    expected_title = '"Compliance History" for Virtual Machine "{}"'.format(
        vmware_vm.name)
    assert history_screen_title == expected_title
def pytest_generate_tests(metafunc):
    """The following lines generate appliance versions based from the current
    build. Appliance version is split and minor_build is picked out for
    generating each version and appending it to the empty versions list"""
    versions = []
    version = get_or_create_current_appliance().version
    split_ver = str(version).split(".")
    try:
        minor_build = split_ver[2]
        assert int(minor_build) != 0
    except IndexError:
        logger.exception(
            'Caught IndexError generating for test_appliance_update, skipping')
    except AssertionError:
        logger.debug(
            'Caught AssertionError: No previous z-stream version to update from'
        )
        versions.append(pytest.param(
            "bad:{!r}".format(version),
            marks=pytest.mark.uncollect(
                'Could not parse minor_build version from: {}'.format(version))))
    else:
        # Walk the z-stream downwards: x.y.(z-1), ..., x.y.0
        for z in range(int(minor_build) - 1, -1, -1):
            versions.append("{}.{}.{}".format(split_ver[0], split_ver[1], z))
    metafunc.parametrize('old_version', versions, indirect=True)
def test_check_compliance_history(request, virtualcenter_provider, vmware_vm,
                                  policy_collection, policy_profile_collection):
    """This test checks if compliance history link in a VM details screen work.

    Steps:
        * Create any VM compliance policy
        * Assign it to a policy profile
        * Assign the policy profile to any VM
        * Perform the compliance check for the VM
        * Go to the VM details screen
        * Click on "History" row in Compliance InfoBox

    Result:
        Compliance history screen with last 10 checks should be opened
    """
    vm_name = vmware_vm.name
    policy = policy_collection.create(
        VMCompliancePolicy,
        "Check compliance history policy {}".format(fauxfactory.gen_alpha()),
        active=True,
        scope="fill_field(VM and Instance : Name, INCLUDES, {})".format(vm_name))
    request.addfinalizer(lambda: policy.delete() if policy.exists else None)
    profile = policy_profile_collection.create(policy.description, policies=[policy])
    request.addfinalizer(lambda: profile.delete() if profile.exists else None)
    virtualcenter_provider.assign_policy_profiles(profile.description)
    request.addfinalizer(
        lambda: virtualcenter_provider.unassign_policy_profiles(profile.description))
    vmware_vm.check_compliance()
    vmware_vm.open_details(["Compliance", "History"])
    appliance = get_or_create_current_appliance()
    screen_title = Text(appliance.browser.widgetastic,
                        "//span[@id='explorer_title_text']").text
    assert screen_title == ('"Compliance History" for Virtual Machine "{}"'
                            .format(vm_name))
def pytest_runtest_logreport(report):
    """Record phase outcomes; on teardown (non-parallel only) log the result."""
    # e.g. test_tracking['test_name']['setup'] = 'passed'
    #      test_tracking['test_name']['call'] = 'skipped'
    #      test_tracking['test_name']['teardown'] = 'failed'
    yield
    node_key = _format_nodeid(report.nodeid, False)
    test_tracking[node_key][report.when] = report.outcome
    if report.when == 'teardown' and pytest.store.parallel_session is None:
        path, lineno, domaininfo = report.location
        test_status = _test_status(node_key)
        if test_status == "failed":
            appliance = get_or_create_current_appliance()
            try:
                logger().info("Managed providers: {}".format(
                    ", ".join([prov.key
                               for prov in appliance.managed_known_providers])))
            except KeyError as ex:
                if 'ext_management_systems' in ex.msg:
                    logger().warning(
                        "Unable to query ext_management_systems table; DB issue")
                else:
                    raise
        logger().info(
            log.format_marker('{} result: {}'.format(
                _format_nodeid(report.nodeid), test_status)),
            extra={'source_file': path, 'source_lineno': lineno})
        if report.outcome == "skipped":
            logger().info(log.format_marker(report.longreprtext))
def main():
    """Parse CLI arguments and install the NetApp SDK on an appliance."""
    parser = argparse.ArgumentParser(
        epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--address', default=None,
                        help='hostname or ip address of target appliance')
    parser.add_argument('--sdk_url', help='url to download sdk pkg',
                        default=cfme_data.get("basic_info", {}).get("netapp_sdk_url"))
    parser.add_argument('--restart', action="store_true",
                        help='restart evmserverd after installation '
                             '(required for proper operation)')
    args = parser.parse_args()
    if not args.address:
        appliance = get_or_create_current_appliance()
    else:
        # Use the 'hostname' keyword for consistency with the other
        # IPAppliance constructions in this codebase (see the vddk script
        # and get_appliance()), which no longer use 'address='.
        appliance = IPAppliance(hostname=args.address)
    print('Address: {}'.format(appliance.address))
    print('SDK URL: {}'.format(args.sdk_url))
    print('Restart: {}'.format(args.restart))
    appliance.install_netapp_sdk(sdk_url=args.sdk_url, reboot=args.restart,
                                 log_callback=log)
def setup_external_auth_openldap(**data):
    """Sets up the appliance for an external authentication with OpenLdap.

    Keywords:
        get_groups: Get User Groups from External Authentication (httpd).
        ipaserver: IPA server address.
        iparealm: Realm.
        credentials: Key of the credential in credentials.yaml
    """
    host_creds = credentials['host_default']
    connect_kwargs = {
        'username': host_creds['username'],
        'password': host_creds['password'],
        'hostname': data['ipaddress'],
    }
    current_appliance = get_or_create_current_appliance()
    appliance_name = 'cfmeappliance{}'.format(fauxfactory.gen_alpha(7).lower())
    appliance_address = current_appliance.hostname
    appliance_fqdn = '{}.{}'.format(appliance_name, data['domain_name'])
    with SSHClient(**connect_kwargs) as ldapserver_ssh:
        # updating the /etc/hosts is a workaround due to the
        # https://bugzilla.redhat.com/show_bug.cgi?id=1360928
        ldapserver_ssh.run_command(
            'echo "{}\t{}" >> /etc/hosts'.format(appliance_address, appliance_fqdn))
        ldapserver_ssh.get_file(remote_file=data['cert_filepath'],
                                local_path=conf_path.strpath)
    ensure_browser_open()
    current_appliance.server.login_admin()
    current_appliance.server.authentication.set_auth_mode(
        mode='external', get_groups=data.pop("get_groups", True))
    current_appliance.configure_appliance_for_openldap_ext_auth(appliance_fqdn)
    current_appliance.server.logout()
def get_template_from_config(template_config_name, create=False):
    """
    Convenience function to grab the details for a template from the yamls
    and create template.
    """
    template_config = conf.cfme_data.get('customization_templates', {})[template_config_name]
    script_file = project_path.join(template_config['script_file'])
    script_data = load_data_file(
        str(script_file), replacements=template_config['replacements']).read()
    collection = get_or_create_current_appliance().collections.customization_templates
    # Shared between instantiate() and create() so they always agree.
    kwargs = dict(
        name=template_config['name'],
        description=template_config['description'],
        image_type=template_config['image_type'],
        script_type=template_config['script_type'],
        script_data=script_data,
    )
    customization_template = collection.instantiate(**kwargs)
    if create and not customization_template.exists():
        return collection.create(**kwargs)
    return customization_template
def pytest_generate_tests(metafunc):
    """Generate (provider, datastore) parameter sets for fleecing tests."""
    argnames, argvalues, idlist = testgen.providers_by_class(
        metafunc, PROVIDER_TYPES, required_fields=['datastores'])
    argnames += ['datastore']
    appliance = get_or_create_current_appliance()
    # TODO: turn the datastore into a parameterized fixture by type,
    # and discuss semantics for obtaining them by type
    datastore_collection = datastore.DatastoreCollection(appliance)
    new_idlist, new_argvalues = [], []
    type_message = ('datastore type must be set to [{}] for smartstate analysis tests'
                    .format('|'.join(DATASTORE_TYPES)))
    for i, argvalue_tuple in enumerate(argvalues):
        args = dict(zip(argnames, argvalue_tuple))
        for ds in args['provider'].data.get('datastores', {}):
            if not ds.get('test_fleece', False):
                continue
            assert ds.get('type') in DATASTORE_TYPES, type_message
            new_argvalues.append(
                argvalues[i][:] + [datastore_collection.instantiate(
                    ds['name'], args['provider'].key, ds['type'])])
            new_idlist.append('{}-{}'.format(args['provider'].key, ds['type']))
    testgen.parametrize(metafunc, argnames, new_argvalues, ids=new_idlist,
                        scope="module")
def pytest_runtest_logreport(self, report):
    """pytest runtest logreport hook - sends serialized log reports to the master """
    self.send_event("runtest_logreport", report=serialize_report(report))
    if report.when != 'teardown':
        return
    path, lineno, domaininfo = report.location
    test_status = _test_status(_format_nodeid(report.nodeid, False))
    if test_status == "failed":
        appliance = get_or_create_current_appliance()
        try:
            managed = ", ".join(
                [prov.key for prov in appliance.managed_known_providers])
            self.log.info("Managed providers: {}".format(managed))
        except KeyError as ex:
            if 'ext_management_systems' in ex.msg:
                self.log.warning("Unable to query ext_management_systems table; DB issue")
            else:
                raise
    self.log.info(
        log.format_marker('{} result: {}'.format(
            _format_nodeid(report.nodeid), test_status)),
        extra={'source_file': path, 'source_lineno': lineno})
    if report.outcome == "skipped":
        self.log.info(log.format_marker(report.longreprtext))
def setup_external_auth_openldap(**data):
    """Sets up the appliance for an external authentication with OpenLdap.

    Keywords:
        get_groups: Get User Groups from External Authentication (httpd).
        ipaserver: IPA server address.
        iparealm: Realm.
        credentials: Key of the credential in credentials.yaml
    """
    connect_kwargs = {
        'username': credentials['host_default']['username'],
        'password': credentials['host_default']['password'],
        'hostname': data['ipaddress'],
    }
    appliance = get_or_create_current_appliance()
    name = 'cfmeappliance{}'.format(fauxfactory.gen_alpha(7).lower())
    address = appliance.hostname
    fqdn = '{}.{}'.format(name, data['domain_name'])
    with SSHClient(**connect_kwargs) as ldapserver_ssh:
        # updating the /etc/hosts is a workaround due to the
        # https://bugzilla.redhat.com/show_bug.cgi?id=1360928
        command = 'echo "{}\t{}" >> /etc/hosts'.format(address, fqdn)
        ldapserver_ssh.run_command(command)
        ldapserver_ssh.get_file(remote_file=data['cert_filepath'],
                                local_path=conf_path.strpath)
    ensure_browser_open()
    appliance.server.login_admin()
    appliance.server.authentication.set_auth_mode(
        mode='external', get_groups=data.pop("get_groups", True)
    )
    appliance.configure_appliance_for_openldap_ext_auth(fqdn)
    appliance.server.logout()
def simulate(instance=None, message=None, request=None, target_type=None,
             target_object=None, execute_methods=None, attributes_values=None,
             pre_clear=True, appliance=None):
    """Runs the simulation of specified Automate object."""
    if not appliance:
        appliance = get_or_create_current_appliance()
    view = navigate_to(appliance.server, 'AutomateSimulation')
    if pre_clear:
        # Wipe the attribute/value pairs and reset the form defaults first.
        view.avp.clear()
        view.fill({
            'instance': 'Request',
            'message': 'create',
            'request': '',
            'target_type': '<None>',
            'execute_methods': True,
        })
    form_values = {
        'instance': instance,
        'message': message,
        'request': request,
        'target_type': target_type,
        'target_object': target_object,
        'execute_methods': execute_methods,
        'avp': attributes_values,
    }
    view.fill(form_values)
    view.submit_button.click()
    view.flash.assert_no_error()
    view.flash.assert_message('Automation Simulation has been run')
def pytest_configure(config):
    """Spin up the artifactor server/client pair and register shutdown."""
    client = get_client(art_config=env.get('artifactor', {}), pytest_config=config)
    # just in case
    if not store.slave_manager:
        with diaper:
            atexit.register(shutdown, config)
    if client:
        config._art_proc = spawn_server(config, client)
        wait_for(net_check, func_args=[client.port, '127.0.0.1'],
                 func_kwargs={'force': True}, num_sec=10,
                 message="wait for artifactor to start")
        client.ready = True
    else:
        config._art_proc = None
    from cfme.utils.log import artifactor_handler
    artifactor_handler.artifactor = client
    if store.slave_manager:
        artifactor_handler.slaveid = store.slaveid
    config._art_client = client
    client.fire_hook('setup_merkyl', ip=get_or_create_current_appliance().address)
def pytest_runtest_logreport(self, report):
    """pytest runtest logreport hook - sends serialized log reports to the master """
    self.send_event("runtest_logreport", report=serialize_report(report))
    if report.when == 'teardown':
        path, lineno, domaininfo = report.location
        status = _test_status(_format_nodeid(report.nodeid, False))
        if status == "failed":
            appliance = get_or_create_current_appliance()
            try:
                providers = [prov.key for prov in appliance.managed_known_providers]
                self.log.info("Managed providers: {}".format(", ".join(providers)))
            except KeyError as ex:
                if 'ext_management_systems' in ex.msg:
                    self.log.warning(
                        "Unable to query ext_management_systems table; DB issue"
                    )
                else:
                    raise
        marker = log.format_marker(
            '{} result: {}'.format(_format_nodeid(report.nodeid), status))
        self.log.info(marker, extra={'source_file': path, 'source_lineno': lineno})
        if report.outcome == "skipped":
            self.log.info(log.format_marker(report.longreprtext))
def get_appliance(appliance_ip):
    """Checks an appliance is not None and if so, loads the appropriate things"""
    from cfme.utils.appliance import IPAppliance, get_or_create_current_appliance
    if appliance_ip:
        return IPAppliance(hostname=appliance_ip)
    return get_or_create_current_appliance()
def test_myservice_crud(setup_provider, context, order_catalog_item_in_ops_ui):
    """Tests Myservice crud in SSUI."""
    appliance = get_or_create_current_appliance()
    service_name = order_catalog_item_in_ops_ui
    with appliance.context.use(context):
        appliance.server.login()
        MyService(appliance, service_name).update(
            {'name': '{}_edited'.format(service_name)})
def get_appliance(appliance_ip):
    """Checks an appliance is not None and if so, loads the appropriate things"""
    from cfme.utils.appliance import IPAppliance, get_or_create_current_appliance
    return (IPAppliance(hostname=appliance_ip) if appliance_ip
            else get_or_create_current_appliance())
def disable_external_auth_ipa():
    """Unconfigure external auth."""
    appliance = get_or_create_current_appliance()
    with appliance.ssh_client as ssh:
        ensure_browser_open()
        appliance.server.login_admin()
        # Back to the default (database) authentication mode.
        appliance.server.authentication.set_auth_mode()
        assert ssh.run_command("appliance_console_cli --uninstall-ipa")
        appliance.wait_for_web_ui()
        appliance.server.logout()
def get_all_vms(do_not_navigate=False):
    """Returns list of all vms on current page"""
    if do_not_navigate:
        from cfme.utils.appliance import get_or_create_current_appliance
        app = get_or_create_current_appliance()
        view = app.browser.create_view(navigator.get_class(Vm, 'VMsOnly').VIEW)
    else:
        view = navigate_to(Vm, 'VMsOnly')
    return [vm_entity.name for vm_entity in view.entities.get_all()]
def __init__(self, request):
    """ A simple adapter to aid in Merkyl Log Inspection during a test.

    This class is really only useful during a test and is designed to
    abstract away accessing the request object. The hooks which are fired
    can be done so during the test without this class/fixture, this is
    merely a convenience and does nothing special.
    """
    appliance = get_or_create_current_appliance()
    self.node = request.node
    self.ip = appliance.hostname
def __init__(self, request):
    """ A simple adapter to aid in Merkyl Log Inspection during a test.

    This class is really only useful during a test and is designed to
    abstract away accessing the request object. The hooks which are fired
    can be done so during the test without this class/fixture, this is
    merely a convenience and does nothing special.
    """
    self.node = request.node
    current_appliance = get_or_create_current_appliance()
    self.ip = current_appliance.address
def disable_external_auth_ipa():
    """Unconfigure external auth."""
    current_appliance = get_or_create_current_appliance()
    server = current_appliance.server
    with current_appliance.ssh_client as ssh:
        ensure_browser_open()
        server.login_admin()
        # Reset to the default authentication mode before uninstalling IPA.
        server.authentication.set_auth_mode()
        assert ssh.run_command("appliance_console_cli --uninstall-ipa")
        current_appliance.wait_for_web_ui()
        server.logout()
def pytest_runtest_protocol(item):
    """Publish session and per-test metadata to artifactor before the test runs."""
    global session_ver
    global session_build
    global session_stream
    if not session_ver:
        session_ver = str(version.current_version())
        session_build = store.current_appliance.build
        session_stream = store.current_appliance.version.stream()
        if str(session_ver) not in session_build:
            session_build = "{}-{}".format(str(session_ver), session_build)
        fire_art_hook(item.config, 'session_info', version=session_ver,
                      build=session_build, stream=session_stream)
    tier = item.get_marker('tier')
    if tier:
        tier = tier.args[0]
    requirement = item.get_marker('requirement')
    if requirement:
        requirement = requirement.args[0]
    try:
        params = item.callspec.params
        param_dict = {p: get_name(v) for p, v in params.iteritems()}
    except AttributeError:
        # Not every item is parametrized; unparametrized items have no
        # ``callspec``. Was a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit.
        param_dict = {}
    ip = get_or_create_current_appliance().address
    # This pre_start_test hook is needed so that filedump is able to make get the test
    # object set up before the logger starts logging. As the logger fires a nested hook
    # to the filedumper, and we can't specify order in riggerlib.
    meta = item.get_marker('meta')
    if meta and 'blockers' in meta.kwargs:
        blockers = []
        for blocker in meta.kwargs['blockers']:
            if isinstance(blocker, int):
                blockers.append(BZ(blocker).url)
            else:
                blockers.append(Blocker.parse(blocker).url)
    else:
        blockers = []
    fire_art_test_hook(item, 'pre_start_test', slaveid=store.slaveid, ip=ip)
    fire_art_test_hook(item, 'start_test', slaveid=store.slaveid, ip=ip,
                       tier=tier, requirement=requirement,
                       param_dict=param_dict, issues=blockers)
    yield
def pytest_runtest_setup(item):
    """Open a browser for tests that request browser fixtures."""
    from cfme.utils.appliance import (
        get_or_create_current_appliance,
        DummyAppliance,
    )
    appliance = get_or_create_current_appliance()
    if isinstance(appliance, DummyAppliance):
        # Dummy appliances have no UI to browse.
        return
    needs_browser = set(getattr(item, 'fixturenames', [])) & browser_fixtures
    if needs_browser:
        cfme.utils.browser.ensure_browser_open()
def handle_assert_artifacts(request, fail_message=None):
    """Collect soft-assert artifacts (tracebacks and a screenshot) via artifactor."""
    appliance = get_or_create_current_appliance()
    if isinstance(appliance, DummyAppliance):
        return
    if fail_message:
        short_tb = full_tb = fail_message.encode('base64')
    else:
        short_tb = '{}'.format(sys.exc_info()[1]).encode('base64')
        full_tb = "".join(traceback.format_tb(sys.exc_info()[2])).encode('base64')
    try:
        ss = cfme.utils.browser.browser().get_screenshot_as_base64()
        ss_error = None
    except Exception as b_ex:
        ss = None
        if str(b_ex):
            ss_error = '{}: {}'.format(type(b_ex).__name__, str(b_ex))
        else:
            ss_error = type(b_ex).__name__
    if ss_error:
        ss_error = ss_error.encode('base64')
    # A simple id to match the artifacts together
    sa_id = "softassert-{}".format(fauxfactory.gen_alpha(length=3).upper())
    from fixtures.pytest_store import store
    node = request.node
    fire_art_test_hook(
        node, 'filedump',
        description="Soft Assert Traceback", contents=full_tb,
        file_type="soft_traceback", display_type="danger",
        display_glyph="align-justify", contents_base64=True,
        group_id=sa_id, slaveid=store.slaveid)
    fire_art_test_hook(
        node, 'filedump',
        description="Soft Assert Short Traceback", contents=short_tb,
        file_type="soft_short_tb", display_type="danger",
        display_glyph="align-justify", contents_base64=True,
        group_id=sa_id, slaveid=store.slaveid)
    if ss is not None:
        fire_art_test_hook(
            node, 'filedump',
            description="Soft Assert Exception screenshot",
            file_type="screenshot", mode="wb", contents_base64=True,
            contents=ss, display_glyph="camera",
            group_id=sa_id, slaveid=store.slaveid)
    if ss_error is not None:
        fire_art_test_hook(
            node, 'filedump',
            description="Soft Assert Screenshot error", mode="w",
            contents_base64=True, contents=ss_error,
            display_type="danger", group_id=sa_id, slaveid=store.slaveid)
def setup_external_auth_ipa(**data):
    """Sets up the appliance for an external authentication with IPA.

    Keywords:
        get_groups: Get User Groups from External Authentication (httpd).
        ipaserver: IPA server address.
        iparealm: Realm.
        credentials: Key of the credential in credentials.yaml
    """
    connect_kwargs = {
        'username': credentials['host_default']['username'],
        'password': credentials['host_default']['password'],
        'hostname': data['ipaserver'],
    }
    current_appliance = get_or_create_current_appliance()
    appliance_name = 'cfmeappliance{}'.format(fauxfactory.gen_alpha(7).lower())
    appliance_address = current_appliance.address
    appliance_fqdn = '{}.{}'.format(appliance_name, data['iparealm'].lower())
    with SSHClient(**connect_kwargs) as ipaserver_ssh:
        ipaserver_ssh.run_command('cp /etc/hosts /etc/hosts_bak')
        ipaserver_ssh.run_command(
            "sed -i -r '/^{}/d' /etc/hosts".format(appliance_address))
        command = 'echo "{}\t{}" >> /etc/hosts'.format(appliance_address, appliance_fqdn)
        ipaserver_ssh.run_command(command)
    with current_appliance.ssh_client as ssh:
        result = ssh.run_command(
            'appliance_console_cli --host {}'.format(appliance_fqdn)).success
        if not current_appliance.is_pod:
            assert result
        else:
            # appliance_console_cli fails when calls hostnamectl --host. it seems docker issue
            # raise BZ ?
            assert str(ssh.run_command('hostname')).rstrip() == appliance_fqdn
        ensure_browser_open()
        current_appliance.server.login_admin()
        if data["ipaserver"] not in (
                current_appliance.server.settings.ntp_servers_form.values()):
            current_appliance.server.settings.update_ntp_servers(
                {'ntp_server_1': data["ipaserver"]})
            sleep(120)
        # BUG FIX: this previously referenced the undefined name ``appliance``
        # (NameError at runtime); it must act on ``current_appliance``, as the
        # sibling implementation of this function already does.
        current_appliance.server.authentication.set_auth_mode(
            mode='external', get_groups=data.pop("get_groups", False))
        creds = credentials.get(data.pop("credentials"), {})
        data.update(**creds)
        assert ssh.run_command(
            "appliance_console_cli --ipaserver {ipaserver} --iparealm {iparealm} "
            "--ipaprincipal {principal} --ipapassword {password}".format(**data))
    current_appliance.server.login_admin()
def setup_external_auth_ipa(**data):
    """Sets up the appliance for an external authentication with IPA.

    Keywords:
        get_groups: Get User Groups from External Authentication (httpd).
        ipaserver: IPA server address.
        iparealm: Realm.
        credentials: Key of the credential in credentials.yaml
    """
    host_creds = credentials['host_default']
    connect_kwargs = {
        'username': host_creds['username'],
        'password': host_creds['password'],
        'hostname': data['ipaserver'],
    }
    current_appliance = get_or_create_current_appliance()
    appliance_name = 'cfmeappliance{}'.format(fauxfactory.gen_alpha(7).lower())
    appliance_address = current_appliance.hostname
    appliance_fqdn = '{}.{}'.format(appliance_name, data['iparealm'].lower())
    with SSHClient(**connect_kwargs) as ipaserver_ssh:
        # Drop any stale /etc/hosts entry for this appliance, then add ours.
        ipaserver_ssh.run_command('cp /etc/hosts /etc/hosts_bak')
        ipaserver_ssh.run_command(
            "sed -i -r '/^{}/d' /etc/hosts".format(appliance_address))
        ipaserver_ssh.run_command(
            'echo "{}\t{}" >> /etc/hosts'.format(appliance_address, appliance_fqdn))
    with current_appliance.ssh_client as ssh:
        result = ssh.run_command(
            'appliance_console_cli --host {}'.format(appliance_fqdn)).success
        if not current_appliance.is_pod:
            assert result
        else:
            # appliance_console_cli fails when calls hostnamectl --host. it seems docker issue
            # raise BZ ?
            assert str(ssh.run_command('hostname')).rstrip() == appliance_fqdn
        ensure_browser_open()
        current_appliance.server.login_admin()
        if data["ipaserver"] not in current_appliance.server.settings.ntp_servers_values:
            current_appliance.server.settings.update_ntp_servers(
                {'ntp_server_1': data["ipaserver"]})
            sleep(120)
        current_appliance.server.authentication.set_auth_mode(
            mode='external', get_groups=data.pop("get_groups", False))
        data.update(**credentials.get(data.pop("credentials"), {}))
        assert ssh.run_command(
            "appliance_console_cli --ipaserver {ipaserver} --iparealm {iparealm} "
            "--ipaprincipal {principal} --ipapassword {password}".format(**data))
    current_appliance.server.login_admin()
def shutdown(config):
    """Shut down the artifactor machinery at the end of a test session.

    Under the shared lock: on the master process (no slave manager) the
    artifact-collection hooks are fired and the artifactor client terminated;
    the merkyl teardown hook is fired against the current appliance, and the
    art process is waited on so it exits cleanly.
    """
    with lock:
        proc = config._art_proc
        if proc:
            if not store.slave_manager:
                # Master process only: collect artifacts and close the session.
                write_line('collecting artifacts')
                fire_art_hook(config, 'finish_session')
            # NOTE(review): indentation reconstructed from collapsed source —
            # this teardown appears to fire for master and slaves alike; confirm.
            fire_art_hook(config, 'teardown_merkyl',
                          ip=get_or_create_current_appliance().address)
            if not store.slave_manager:
                config._art_client.terminate()
                # Re-read the handle after terminate(); presumably it may have
                # been cleared/replaced — can't tell from here.
                proc = config._art_proc
                if proc:
                    proc.wait()
def _method_setup(vm_names, provider_crud=None):
    """Shared setup: open a VMs listing and tick the checkbox of each named VM.

    Accepts a single VM name or an iterable of names. With *provider_crud*
    the provider's own VM listing is loaded; otherwise the global 'VMsOnly'
    destination is navigated to.
    """
    names = [vm_names] if isinstance(vm_names, basestring) else vm_names
    if provider_crud:
        provider_crud.load_all_provider_vms()
        # Function-local import — presumably avoids an import-time cycle; confirm.
        from cfme.utils.appliance import get_or_create_current_appliance
        appliance = get_or_create_current_appliance()
        view = appliance.browser.create_view(navigator.get_class(Vm, 'VMsOnly').VIEW)
    else:
        view = navigate_to(Vm, 'VMsOnly')
    # Maximize the page size so every requested VM is reachable on one page.
    if view.entities.paginator.exists:
        view.entities.paginator.set_items_per_page(1000)
    for name in names:
        view.entities.get_entity(name).check()
def add_server_roles(server_roles, server_roles_mode="add"):
    """Set the appliance server roles in the DB according to *server_roles*.

    :param server_roles: ``None`` (enable only the user interface role),
        ``"default"`` (enable the YAML-defined default set), a list or
        space-separated string of role names (optionally ``+``/``-`` prefixed)
        when mode is ``"add"``, or a YAML selector path when mode is
        ``"cfmedata"``.
    :param server_roles_mode: ``"add"`` or ``"cfmedata"``; ignored when
        *server_roles* is ``None`` or ``"default"``.
    :raises Exception: on an unrecognized mode or unknown role name(s).
    """
    # Disable all server roles
    # and then figure out which ones should be enabled
    from cfme.utils.appliance import get_or_create_current_appliance
    current_appliance = get_or_create_current_appliance()
    server_settings = current_appliance.server.settings
    roles_with_vals = {k: False for k in available_roles}
    if server_roles is None:
        # Only user interface
        roles_with_vals['user_interface'] = True
    elif server_roles == "default":
        # The ones specified in YAML
        roles_list = cfme_data["server_roles"]["sets"]["default"]
        roles_with_vals.update({k: True for k in roles_list})
    elif server_roles_mode == "add":
        # The ones that are already enabled and enable/disable the ones specified
        # -server_role, +server_role or server_role
        roles_with_vals = server_settings.server_roles_db
        if isinstance(server_roles, basestring):
            server_roles = server_roles.split(' ')
        for role in server_roles:
            if role.startswith('-'):
                roles_with_vals[role[1:]] = False
            elif role.startswith('+'):
                roles_with_vals[role[1:]] = True
            else:
                # A bare role name means "enable".
                roles_with_vals[role] = True
    elif server_roles_mode == "cfmedata":
        roles_list = cfme_data
        # Drills down into cfme_data YAML by selector, expecting a list
        # of roles at the end. A KeyError here probably means the YAML
        # selector is wrong
        for selector in server_roles:
            roles_list = roles_list[selector]
        roles_with_vals.update({k: True for k in roles_list})
    else:
        raise Exception('No server role changes defined.')
    # Reject any role name that is not in the known set before writing.
    if not available_roles.issuperset(set(roles_with_vals)):
        unknown_roles = ', '.join(set(roles_with_vals) - available_roles)
        raise Exception('Unknown server role(s): {}'.format(unknown_roles))
    server_settings.update_server_roles_db(roles_with_vals)
def add_server_roles(server_roles, server_roles_mode="add"):
    """Apply a server-role configuration to the appliance database.

    *server_roles* may be ``None`` (UI role only), ``"default"`` (the YAML
    default set), a list/space-separated string of ``+``/``-`` prefixed role
    names (mode ``"add"``), or a YAML selector path (mode ``"cfmedata"``).
    Raises on an unknown mode or role name.
    """
    from cfme.utils.appliance import get_or_create_current_appliance
    settings = get_or_create_current_appliance().server.settings
    # Start from "everything off", then enable per the requested mode.
    desired = dict.fromkeys(available_roles, False)
    if server_roles is None:
        # Bare minimum: just the UI.
        desired['user_interface'] = True
    elif server_roles == "default":
        # YAML-defined default role set.
        for role in cfme_data["server_roles"]["sets"]["default"]:
            desired[role] = True
    elif server_roles_mode == "add":
        # Begin from the currently-enabled roles, then apply +/-/bare tokens.
        desired = settings.server_roles_db
        tokens = server_roles.split(' ') if isinstance(server_roles, basestring) \
            else server_roles
        for token in tokens:
            if token.startswith('-'):
                desired[token[1:]] = False
            elif token.startswith('+'):
                desired[token[1:]] = True
            else:
                desired[token] = True
    elif server_roles_mode == "cfmedata":
        # Drill into cfme_data by the selector path; a KeyError probably
        # means the YAML selector is wrong.
        node = cfme_data
        for selector in server_roles:
            node = node[selector]
        for role in node:
            desired[role] = True
    else:
        raise Exception('No server role changes defined.')
    unknown = set(desired) - available_roles
    if unknown:
        raise Exception('Unknown server role(s): {}'.format(', '.join(unknown)))
    settings.update_server_roles_db(desired)
def get_template_from_config(template_config_name):
    """ Convenience function to grab the details for a template from the yamls. """
    cfg = conf.cfme_data.get('customization_templates', {})[template_config_name]
    # Load the template script file, applying the configured replacements.
    script_file = project_path.join(cfg['script_file'])
    script_data = load_data_file(str(script_file),
                                 replacements=cfg['replacements']).read()
    templates = get_or_create_current_appliance().collections.customization_templates
    return templates.instantiate(
        name=cfg['name'],
        description=cfg['description'],
        image_type=cfg['image_type'],
        script_type=cfg['script_type'],
        script_data=script_data,
    )
def main():
    """CLI entry point: install the VDDK package on a target appliance.

    Without --address the current (env-configured) appliance is used;
    otherwise an IPAppliance is built from the given address.
    """
    parser = argparse.ArgumentParser(epilog=__doc__,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--address',
                        help='hostname or ip address of target appliance', default=None)
    parser.add_argument('--vddk_url', help='url to download vddk pkg')
    parser.add_argument('--reboot', help='reboot after installation ' +
                        '(required for proper operation)', action="store_true")
    parser.add_argument('--force',
                        help='force installation if version detected', action="store_true")
    args = parser.parse_args()
    if not args.address:
        appliance = get_or_create_current_appliance()
    else:
        # BUG FIX: urlparse(...).netloc is '' for a scheme-less value such as a
        # bare hostname or IP (the documented input), which previously produced
        # an appliance with an empty hostname. Fall back to the raw address.
        appliance = IPAppliance(hostname=urlparse(args.address).netloc or args.address)
    appliance.install_vddk(
        reboot=args.reboot, force=args.force, vddk_url=args.vddk_url, log_callback=log)
def pytest_generate_tests(metafunc):
    """Parametrize 'old_version' with earlier z-stream versions of the current build.

    The appliance version string is split on dots and the third component
    (minor build) is counted down to zero, producing one candidate version per
    step. When no minor build can be parsed, either nothing is generated
    (IndexError) or a single uncollected "bad" parameter is emitted
    (minor build of 0).
    """
    version = get_or_create_current_appliance().version
    parts = str(version).split(".")
    candidates = []
    try:
        minor_build = parts[2]
        assert int(minor_build) != 0
    except IndexError:
        logger.exception('Caught IndexError generating for test_appliance_update, skipping')
    except AssertionError:
        logger.debug('Caught AssertionError: No previous z-stream version to update from')
        candidates.append(pytest.param(
            "bad:{!r}".format(version),
            marks=pytest.mark.uncollect(
                'Could not parse minor_build version from: {}'.format(version)
            )))
    else:
        # Count the z-stream down: x.y.(n-1), x.y.(n-2), ..., x.y.0
        candidates.extend(
            "{}.{}.{}".format(parts[0], parts[1], z)
            for z in range(int(minor_build) - 1, -1, -1))
    metafunc.parametrize('old_version', candidates, indirect=True)
def main():
    """CLI entry point: install the NetApp SDK on a target appliance.

    Without --address the current (env-configured) appliance is used;
    otherwise an IPAppliance is built from the given address.
    """
    parser = argparse.ArgumentParser(
        epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--address',
                        help='hostname or ip address of target appliance', default=None)
    parser.add_argument('--sdk_url',
                        help='url to download sdk pkg',
                        default=cfme_data.get("basic_info", {}).get("netapp_sdk_url"))
    parser.add_argument('--restart',
                        help='restart evmserverd after installation ' +
                        '(required for proper operation)',
                        action="store_true")
    args = parser.parse_args()
    appliance = (IPAppliance(hostname=args.address) if args.address
                 else get_or_create_current_appliance())
    print('Address: {}'.format(appliance.hostname))
    print('SDK URL: {}'.format(args.sdk_url))
    print('Restart: {}'.format(args.restart))
    appliance.install_netapp_sdk(sdk_url=args.sdk_url, reboot=args.restart,
                                 log_callback=log)
def new_paginator():
    """ Simple function to avoid module level import """
    browser = get_or_create_current_appliance().browser.widgetastic
    return PaginationPane(parent=browser)
# Script body: build a REST client against a target appliance and collect
# its API collection info into `store`.
parser = argparse.ArgumentParser(
    description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
    '--url',
    default=None,
    help="URL of the target appliance, default pulled from local environment conf")
parser.add_argument(
    '--logfile',
    metavar='FILE',
    default=os.path.join(log_path.strpath, 'cfme.log'),
    help="path to cfme log file, default: %(default)s")
args = parser.parse_args()
# Fall back to the environment-configured appliance when no URL was given.
appliance_url = args.url or get_or_create_current_appliance().url
# we are really not interested in any warnings and "warnings.simplefilter('ignore')"
# doesn't work when it's redefined later in the REST API client
warnings.showwarning = lambda *args, **kwargs: None
# Authenticated REST client rooted at <appliance>/api; self-signed certs accepted.
api = MiqApi(
    '{}/api'.format(appliance_url.rstrip('/')),
    (conf.credentials['default']['username'],
     conf.credentials['default']['password']),
    verify_ssl=False)
print(f"Appliance URL: {appliance_url}")
store = {}
get_collections_info(api, store)
if __name__ == '__main__':
    # Entry point: query an appliance's REST API and collect collection info.
    arg_parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    arg_parser.add_argument(
        '--url', default=None,
        help="URL of the target appliance, default pulled from local environment conf")
    arg_parser.add_argument(
        '--logfile', metavar='FILE',
        default=os.path.join(log_path.strpath, 'cfme.log'),
        help="path to cfme log file, default: %(default)s")
    cli_args = arg_parser.parse_args()
    target_url = cli_args.url or get_or_create_current_appliance().url
    # we are really not interested in any warnings and "warnings.simplefilter('ignore')"
    # doesn't work when it's redefined later in the REST API client
    warnings.showwarning = lambda *args, **kwargs: None
    rest_client = MiqApi(
        '{}/api'.format(target_url.rstrip('/')),
        (conf.credentials['default']['username'],
         conf.credentials['default']['password']),
        verify_ssl=False)
    print("Appliance URL: {}".format(target_url))
    collected = {}
    get_collections_info(rest_client, collected)
def new_paginator():
    """Return a fresh PaginationPane bound to the current appliance's browser.

    Kept as a function (rather than a module-level instance) so the appliance
    browser is only touched when a paginator is actually requested.
    """
    appliance = get_or_create_current_appliance()
    paginator = PaginationPane(parent=appliance.browser.widgetastic)
    return paginator
def appliance():
    """Accessor for the active appliance instance (created on first use)."""
    current = get_or_create_current_appliance()
    return current