def compute_coverage(branch):
    """Compute what percentage of lines changed vs. *branch* are covered.

    Reads recorded coverage from the project's ``.coverage`` file, diffs the
    working tree against *branch* with ``git diff -U0``, and reports the share
    of changed ``.py`` lines that were executed.

    Args:
        branch: git ref to diff against (e.g. ``'master'``).

    Returns:
        float: coverage percentage in [0, 100].  Returns 100.0 when the diff
        contains no python line changes (previously this raised
        ZeroDivisionError).
    """
    from collections import defaultdict

    coverage_data = CoverageData()
    try:
        with project_path.join('.coverage').open() as fp:
            coverage_data.read_file(fp)
    except Exception:
        # Best effort: missing/unreadable coverage data just means 0% covered.
        print("No coverage data found", file=sys.stderr)

    git_proc = subprocess.Popen(['git', 'diff', '-U0', branch],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
    git_output = git_proc.stdout.read()
    files = git_output.split("diff --git")
    file_data = defaultdict(list)
    for the_file in files:
        filenames = re.findall(r'a/(.*?) b/(.*)', the_file)
        if not filenames:
            continue
        filename = project_path.join(filenames[0][1])
        if '.py' != filename.ext:
            continue
        # Sentinel so the lookahead below also terminates the last hunk.
        the_file += "git_output_checker"
        the_diffs = re.findall(
            r'(@@.*?@@.*?(?=@@|git_output_checker))',
            the_file,
            re.M | re.S,
        )
        for diff in the_diffs:
            # Hunk header "@@ -a,b +c,d @@": groups 3/5 hold c and d.
            diff_args = re.match(
                r'@@ -(\d+)(,(\d+))*\s+\+(\d+)(,(\d+))*', diff).groups()
            if diff_args[5]:
                start = int(diff_args[3])
                file_data[filename].extend(
                    start + offset for offset in range(int(diff_args[5])))
            else:
                file_data[filename].append(int(diff_args[3]))

    line_count = 0
    completed_lines = 0
    for file_changed, lines in file_data.items():
        # The executed-lines lookup depends only on the file, so hoist it
        # out of the per-line loop.
        used_lines = coverage_data.lines(file_changed)
        if not used_lines:
            line_count += len(lines)
            continue
        if isinstance(used_lines, int):
            used_lines = {used_lines}
        else:
            used_lines = set(used_lines)
        for line in lines:
            line_count += 1
            if line in used_lines:
                completed_lines += 1
    if not line_count:
        # No python changes at all -> nothing left uncovered.
        return 100.0
    return float(completed_lines) / line_count * 100
def setup(sphinx):
    """Main sphinx entry point, calls sphinx-apidoc"""
    for module in modules_to_document:
        # Resolve source, excluded-tests and output locations for this module.
        source_dir = project_path.join(module).strpath
        excluded_tests = project_path.join(module, 'tests').strpath
        destination = _doc_modules_path.join(module).strpath
        # Shove stdout into a pipe to supress the output, but still let stderr out
        command = [
            'sphinx-apidoc', '-T', '-e', '-o',
            destination, source_dir, excluded_tests,
        ]
        apidoc_proc = subprocess.Popen(command, stdout=subprocess.PIPE)
        apidoc_proc.wait()
    sphinx.add_config_value('clean_autogenerated_docs', False, rebuild='')
    sphinx.connect('build-finished', purge_module_apidoc)
def setup(sphinx):
    """Main sphinx entry point, calls sphinx-apidoc"""
    for module in modules_to_document:
        src = project_path.join(module).strpath
        tests_dir = project_path.join(module, 'tests').strpath
        out_dir = _doc_modules_path.join(module).strpath
        # -T: no toc file, -e: one page per module, -o: output directory;
        # the trailing tests_dir is an exclusion pattern.
        apidoc.main(['-T', '-e', '-o', out_dir, src, tests_dir])
    sphinx.add_config_value('clean_autogenerated_docs', False, rebuild='')
    sphinx.connect('build-finished', purge_module_apidoc)
def compute_coverage(branch):
    """Compute what percentage of lines changed vs. *branch* are covered.

    Reads coverage data from ``.coverage``, diffs against *branch* with
    ``git diff -U0`` and reports the share of changed ``.py`` lines that were
    executed.

    Args:
        branch: git ref to diff against.

    Returns:
        float: percentage in [0, 100]; 100.0 when no python lines changed
        (previously this raised ZeroDivisionError).
    """
    from collections import defaultdict

    coverage_data = CoverageData()
    try:
        with project_path.join('.coverage').open() as fp:
            coverage_data.read_file(fp)
    except Exception:
        # Best effort: proceed with empty coverage data.
        print("No coverage data found", file=sys.stderr)

    git_proc = subprocess.Popen(['git', 'diff', '-U0', branch],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
    git_output = git_proc.stdout.read()
    files = git_output.split("diff --git")
    file_data = defaultdict(list)
    for the_file in files:
        # Raw strings: '\d' in a plain literal is an invalid escape in py3.
        filenames = re.findall(r'a/(.*?) b/(.*)', the_file)
        if not filenames:
            continue
        filename = project_path.join(filenames[0][1])
        if '.py' != filename.ext:
            continue
        # Sentinel so the lookahead also terminates the final hunk.
        the_file += "git_output_checker"
        the_diffs = re.findall(r'(@@.*?@@.*?(?=@@|git_output_checker))',
                               the_file,
                               re.M | re.S,
                               )
        for diff in the_diffs:
            # Hunk header "@@ -a,b +c,d @@": groups 3/5 hold c and d.
            diff_args = re.match(
                r'@@ -(\d+)(,(\d+))*\s+\+(\d+)(,(\d+))*', diff).groups()
            if diff_args[5]:
                start = int(diff_args[3])
                file_data[filename].extend(
                    start + offset for offset in range(int(diff_args[5])))
            else:
                file_data[filename].append(int(diff_args[3]))

    line_count = 0
    completed_lines = 0
    for file_changed, lines in file_data.items():
        # Executed-lines lookup depends only on the file; hoist it.
        used_lines = coverage_data.lines(file_changed)
        if not used_lines:
            line_count += len(lines)
            continue
        if isinstance(used_lines, int):
            used_lines = {used_lines}
        else:
            used_lines = set(used_lines)
        for line in lines:
            line_count += 1
            if line in used_lines:
                completed_lines += 1
    if not line_count:
        # No python changes -> nothing left uncovered.
        return 100.0
    return float(completed_lines) / line_count * 100
def mangle_in_sprout_appliances(config):
    """
    this helper function resets the appliances option of the config
    and mangles in the sprout ones

    its a hopefully temporary hack until we make a correctly ordered hook
    for obtaining appliances
    """
    provision_request = SproutProvisioningRequest.from_config(config)
    mgr = config._sprout_mgr = SproutManager()
    requested_appliances = mgr.request_appliances(provision_request)
    # Drop whatever was configured before; the sprout-provided set wins.
    appliances = config.option.appliances
    appliances[:] = []
    log.info("Appliances were provided:")
    for provided in requested_appliances:
        appliance_url = "https://{}/".format(provided["ip_address"])
        appliances.append(appliance_url)
        log.info("- %s is %s", appliance_url, provided['name'])
    mgr.reset_timer()
    # Record the template so downstream tooling can pick it up.
    template_name = requested_appliances[0]["template_name"]
    conf.runtime["cfme_data"]["basic_info"]["appliance_template"] = template_name
    log.info("appliance_template: %s", template_name)
    with project_path.join('.appliance_template').open('w') as template_file:
        template_file.write('export appliance_template="{}"'.format(template_name))
    log.info("Sprout setup finished.")
    config.pluginmanager.register(ShutdownPlugin())
def mangle_in_sprout_appliances(config):
    """
    this helper function resets the appliances option of the config
    and mangles in the sprout ones

    its a hopefully temporary hack until we make a correctly ordered hook
    for obtaining appliances
    """
    provision_request = SproutProvisioningRequest.from_config(config)
    mgr = config._sprout_mgr = SproutManager()
    requested_appliances = mgr.request_appliances(provision_request)
    # Replace any preconfigured appliances with the sprout-provided ones.
    appliances = config.option.appliances
    appliances[:] = []
    log.info("Appliances were provided:")
    for provided in requested_appliances:
        appliance_url = "https://{}/".format(provided["ip_address"])
        appliances.append(appliance_url)
        log.info("- %s is %s", appliance_url, provided['name'])
    mgr.reset_timer()
    # Set the base_url for collection purposes on the first appliance
    conf.runtime["env"]["base_url"] = appliances[0]
    # Retrieve and print the template_name for Jenkins to pick up
    template_name = requested_appliances[0]["template_name"]
    conf.runtime["cfme_data"]["basic_info"]["appliance_template"] = template_name
    log.info("appliance_template: %s", template_name)
    with project_path.join('.appliance_template').open('w') as template_file:
        template_file.write('export appliance_template="{}"'.format(template_name))
    log.info("Sprout setup finished.")
    config.pluginmanager.register(ShutdownPlugin())
def get_template_from_config(template_config_name, create=False):
    """
    Convenience function to grab the details for a template from the yamls
    and create template.
    """
    template_config = conf.cfme_data.get('customization_templates', {})[template_config_name]
    script_file = project_path.join(template_config['script_file'])
    script_data = load_data_file(
        str(script_file), replacements=template_config['replacements']).read()
    appliance = get_or_create_current_appliance()
    collection = appliance.collections.customization_templates
    # One argument set shared between instantiate() and create().
    kwargs = dict(
        name=template_config['name'],
        description=template_config['description'],
        image_type=template_config['image_type'],
        script_type=template_config['script_type'],
        script_data=script_data,
    )
    customization_template = collection.instantiate(**kwargs)
    if create and not customization_template.exists():
        return collection.create(**kwargs)
    return customization_template
def get_template_from_config(template_config_name):
    """
    Convenience function to grab the details for a template from the yamls
    and create template.

    Args:
        template_config_name: key into the ``customization_templates`` yaml
            section describing the template.

    Returns:
        The existing customization template from the collection, or the
        newly created one when it does not exist yet.
    """
    template_config = conf.cfme_data.get('customization_templates', {})[template_config_name]
    script_data = load_data_file(str(project_path.join(template_config['script_file'])),
                                 replacements=template_config['replacements'])
    script_data = script_data.read()
    appliance = get_or_create_current_appliance()
    collection = appliance.collections.customization_templates
    # Build the argument set once so instantiate() and create() cannot drift
    # apart (they were previously duplicated literal-for-literal).
    kwargs = {
        'name': template_config['name'],
        'description': template_config['description'],
        'image_type': template_config['image_type'],
        'script_type': template_config['script_type'],
        'script_data': script_data,
    }
    customization_template = collection.instantiate(**kwargs)
    if not customization_template.exists():
        return collection.create(**kwargs)
    return customization_template
def check_update(self):
    """Run ``sprout.sh check-update`` and record in redis whether an update is needed."""
    script = project_path.join("sprout").join("sprout.sh")
    try:
        outcome = command.run([script.strpath, "check-update"])
    except command.CommandException as exc:
        # A failing command still carries its output on the exception object.
        outcome = exc
    redis.set("sprout-needs-update",
              outcome.output.strip().lower() != "up-to-date")
def mangle_in_sprout_appliances(config):
    """
    this helper function resets the appliances option of the config
    and mangles in the sprout ones

    its a hopefully temporary hack until we make a correctly ordered hook
    for obtaining appliances
    """
    provision_request = SproutProvisioningRequest.from_config(config)
    mgr = config._sprout_mgr = SproutManager(config.option.sprout_user_key)
    try:
        requested_appliances = mgr.request_appliances(provision_request)
    except AuthException:
        log.exception(
            'Sprout client not authenticated, please provide env vars or sprout_user_key'
        )
        raise
    # Throw away any appliances configured earlier; the sprout set wins.
    config.option.appliances[:] = []
    appliances = config.option.appliances
    log.info("Appliances were provided:")
    for appliance in requested_appliances:
        appliance_args = {'hostname': appliance['url']}
        # NOTE(review): assumes sprout's 'provider' key matches an entry in
        # cfme_data['management_systems'] -- confirm against sprout payloads.
        provider_data = conf.cfme_data['management_systems'].get(
            appliance['provider'])
        if provider_data and provider_data['type'] == 'openshift':
            # Openshift-hosted appliances additionally need pod coordinates
            # and both openshift and ssh credentials.
            ocp_creds = conf.credentials[provider_data['credentials']]
            ssh_creds = conf.credentials[provider_data['ssh_creds']]
            extra_args = {
                'container': appliance['container'],
                'db_host': appliance['db_host'],
                'project': appliance['project'],
                'openshift_creds': {
                    'hostname': provider_data['hostname'],
                    'username': ocp_creds['username'],
                    'password': ocp_creds['password'],
                    'ssh': {
                        'username': ssh_creds['username'],
                        'password': ssh_creds['password'],
                    }
                }
            }
            appliance_args.update(extra_args)
        appliances.append(appliance_args)
        log.info("- %s is %s", appliance['url'], appliance['name'])
    mgr.reset_timer()
    # Record the template name so external tooling can read it back from
    # the .appliance_template file written below.
    template_name = requested_appliances[0]["template_name"]
    conf.runtime["cfme_data"]["basic_info"][
        "appliance_template"] = template_name
    log.info("appliance_template: %s", template_name)
    with project_path.join('.appliance_template').open('w') as template_file:
        template_file.write(
            'export appliance_template="{}"'.format(template_name))
    log.info("Sprout setup finished.")
    config.pluginmanager.register(ShutdownPlugin())
def mangle_in_sprout_appliances(config):
    """
    this helper function resets the appliances option of the config
    and mangles in the sprout ones

    its a hopefully temporary hack until we make a correctly ordered hook
    for obtaining appliances
    """
    provision_request = SproutProvisioningRequest.from_config(config)
    mgr = config._sprout_mgr = SproutManager(config.option.sprout_user_key)
    try:
        requested_appliances = mgr.request_appliances(provision_request)
    except AuthException:
        log.exception('Sprout client not authenticated, please provide env vars or sprout_user_key')
        raise
    # Replace any preconfigured appliances with the sprout-provided ones.
    appliances = config.option.appliances
    appliances[:] = []
    log.info("Appliances were provided:")
    for sprout_appliance in requested_appliances:
        appliance_args = {'hostname': sprout_appliance['url']}
        provider_data = conf.cfme_data['management_systems'].get(sprout_appliance['provider'])
        if provider_data and provider_data['type'] == 'openshift':
            # Pod-hosted appliances need openshift + ssh credentials as well.
            ocp_creds = conf.credentials[provider_data['credentials']]
            ssh_creds = conf.credentials[provider_data['ssh_creds']]
            appliance_args.update({
                'container': sprout_appliance['container'],
                'db_host': sprout_appliance['db_host'],
                'project': sprout_appliance['project'],
                'openshift_creds': {
                    'hostname': provider_data['hostname'],
                    'username': ocp_creds['username'],
                    'password': ocp_creds['password'],
                    'ssh': {
                        'username': ssh_creds['username'],
                        'password': ssh_creds['password'],
                    },
                },
            })
        appliances.append(appliance_args)
        log.info("- %s is %s", sprout_appliance['url'], sprout_appliance['name'])
    mgr.reset_timer()
    template_name = requested_appliances[0]["template_name"]
    conf.runtime["cfme_data"]["basic_info"]["appliance_template"] = template_name
    log.info("appliance_template: %s", template_name)
    with project_path.join('.appliance_template').open('w') as template_file:
        template_file.write('export appliance_template="{}"'.format(template_name))
    log.info("Sprout setup finished.")
    config.pluginmanager.register(ShutdownPlugin())
def pytest_configure(config):
    """Register mark-translation plugins built from the annotation csv."""
    path = cfme_data.get('cfme_annotations_path')
    parsed = []
    if path:
        to_parse = project_path.join(path)
        parsed = parse(to_parse)
        if not parsed:
            store.terminalreporter.line(
                'no test annotation found in {}'.format(to_parse), yellow=True)
    else:
        store.terminalreporter.line(
            'no test annotation found in {}'.format(path), yellow=True)
    register = config.pluginmanager.register
    register(MarkFromMap.from_parsed_list(parsed, 'tier', pytest.mark.tier))
    register(MarkFromMap.from_parsed_list(parsed, 'requirement', pytest.mark.requirement))
    register(MarkFromMap.from_parsed_list(parsed, 'type', pytest.mark.__getattr__))
def get_template_from_config(template_config_name):
    """
    Convenience function to grab the details for a template from the yamls.
    """
    template_config = conf.cfme_data.get('customization_templates', {})[template_config_name]
    script_file = str(project_path.join(template_config['script_file']))
    script_data = load_data_file(
        script_file, replacements=template_config['replacements']).read()
    return CustomizationTemplate(
        name=template_config['name'],
        description=template_config['description'],
        image_type=template_config['image_type'],
        script_type=template_config['script_type'],
        script_data=script_data,
    )
def pytest_configure(config):
    """Wire csv-derived test annotations into pytest marks."""
    path = cfme_data.get('cfme_annotations_path')
    if path:
        to_parse = project_path.join(path)
        parsed = parse(to_parse)
        if not parsed:
            store.terminalreporter.line(
                'no test annotation found in {}'.format(to_parse), yellow=True)
    else:
        store.terminalreporter.line(
            'no test annotation found in {}'.format(path), yellow=True)
        parsed = []
    # One plugin per annotation column -> mark pairing.
    mark_sources = (
        ('tier', pytest.mark.tier),
        ('requirement', pytest.mark.requirement),
        ('type', pytest.mark.__getattr__),
    )
    for key, mark in mark_sources:
        config.pluginmanager.register(
            MarkFromMap.from_parsed_list(parsed, key, mark))
def get_template_from_config(template_config_name):
    """
    Convenience function to grab the details for a template from the yamls.
    """
    template_config = conf.cfme_data.get('customization_templates', {})[template_config_name]
    data_file = load_data_file(
        str(project_path.join(template_config['script_file'])),
        replacements=template_config['replacements'])
    script_data = data_file.read()
    appliance = get_or_create_current_appliance()
    collection = appliance.collections.customization_templates
    template_kwargs = dict(
        name=template_config['name'],
        description=template_config['description'],
        image_type=template_config['image_type'],
        script_type=template_config['script_type'],
        script_data=script_data,
    )
    return collection.instantiate(**template_kwargs)
def get_template_from_config(template_config_name, create=False, appliance=None):
    """
    Convenience function to grab the details for a template from the yamls
    and create template.

    Args:
        template_config_name: key into the ``customization_templates`` yaml
            section describing the template.
        create: when True, create the template on the appliance if it does
            not already exist.
        appliance: the appliance whose collections are used (required).

    Raises:
        ValueError: when no appliance is supplied.
    """
    # 'assert' is stripped under 'python -O', so validate with a real
    # exception instead of the previous bare assert.
    if appliance is None:
        raise ValueError('an appliance must be supplied')
    template_config = conf.cfme_data.get('customization_templates', {})[template_config_name]
    script_data = load_data_file(str(project_path.join(template_config['script_file'])),
                                 replacements=template_config['replacements'])
    script_data = script_data.read()
    collection = appliance.collections.customization_templates
    # Single argument set shared by instantiate() and create().
    kwargs = {
        'name': template_config['name'],
        'description': template_config['description'],
        'image_type': template_config['image_type'],
        'script_type': template_config['script_type'],
        'script_data': script_data
    }
    customization_template = collection.instantiate(**kwargs)
    if create and not customization_template.exists():
        return collection.create(**kwargs)
    return customization_template
def pytest_configure(config):
    """Provision sprout appliances at pytest configure time.

    No-op when appliances were passed explicitly on the command line or when
    --use-sprout is not enabled.
    """
    if config.getoption("appliances"):
        return
    if not config.getoption('--use-sprout'):
        return
    provision_request = SproutProvisioningRequest.from_config(config)
    mgr = config._sprout_mgr = SproutManager()
    requested_appliances = mgr.request_appliances(provision_request)
    # Replace any preconfigured appliances with the sprout-provided set.
    config.option.appliances[:] = []
    appliances = config.option.appliances
    # Push an appliance to the stack to have proper reference for test collection
    # FIXME: this is a bad hack based on the need for controll of collection partitioning
    appliance_stack.push(
        IPAppliance(address=requested_appliances[0]["ip_address"]))
    log.info("Appliances were provided:")
    for appliance in requested_appliances:
        url = "https://{}/".format(appliance["ip_address"])
        appliances.append(url)
        log.info("- %s is %s", url, appliance['name'])
    mgr.reset_timer()
    # Set the base_url for collection purposes on the first appliance
    conf.runtime["env"]["base_url"] = appliances[0]
    # Retrieve and print the template_name for Jenkins to pick up
    template_name = requested_appliances[0]["template_name"]
    conf.runtime["cfme_data"]["basic_info"][
        "appliance_template"] = template_name
    log.info("appliance_template: %s", template_name)
    with project_path.join('.appliance_template').open('w') as template_file:
        template_file.write(
            'export appliance_template="{}"'.format(template_name))
    log.info("Sprout setup finished.")
    config.pluginmanager.register(ShutdownPlugin())
def local_file_path(self):
    """Absolute path (string) of this image file under the project root."""
    local_file = project_path.join(self.image_name)
    return local_file.strpath
return cmp(self.rc, other) elif isinstance(other, six.string_types): return cmp(self.output, other) else: raise ValueError('You can only compare SSHResult with str or int') @property def success(self): return self.rc == 0 @property def failed(self): return self.rc != 0 _ssh_key_file = project_path.join('.generated_ssh_key') _ssh_pubkey_file = project_path.join('.generated_ssh_key.pub') _client_session = [] class SSHClient(paramiko.SSHClient): """paramiko.SSHClient wrapper Allows copying/overriding and use as a context manager Constructor kwargs are handed directly to paramiko.SSHClient.connect() Args: container: If specified, then it is assumed that the VM hosts a container of CFME. The param then contains the name of the container. project: openshift's project which holds CFME pods
file_part = caseid[:-needle - 1].replace('.', os.sep) else: file_part = caseid return "{}.py::{}{}".format(file_part, attribute_part, parameter_part) def _clean(mapping): mapping.pop('', '') try: return { 'requirement': int(mapping['Requirement']), 'tier': int(mapping['TestTier']), 'id': generate_nodeid(mapping), 'type': mapping['TestType'].lower(), } except (TypeError, ValueError): return None def parse(path): if not path.check(): return [] with path.open() as fp: return filter(None, map(_clean, csv.DictReader(fp))) if __name__ == '__main__': mapping_file = project_path.join(py.std.sys.argv[1]) print(yaml.safe_dump(parse(mapping_file), default_flow_style=False))
def local_file_path(self):
    """Return the string path of the image file, rooted at the project directory."""
    target = project_path.join(self.image_name)
    return target.strpath
    @contextmanager
    def appliances_ignored_when_renaming(self, *appliances):
        """Context manager: mark *appliances* as being renamed for its duration.

        The names are added to the shared ``renaming_appliances`` set stored
        in redis on entry and removed again on exit.
        """
        with self.atomic() as client:
            ignored_appliances = client._get("renaming_appliances")
            if ignored_appliances is None:
                # Key not set yet -- start from an empty set.
                ignored_appliances = set([])
            for appliance in appliances:
                ignored_appliances.add(appliance)
            client._set("renaming_appliances", ignored_appliances)
        yield
        with self.atomic() as client:
            ignored_appliances = client._get("renaming_appliances")
            if ignored_appliances is None:
                ignored_appliances = set([])
            for appliance in appliances:
                try:
                    ignored_appliances.remove(appliance)
                except KeyError:
                    # Something wrong happened, ignore
                    pass
            client._set("renaming_appliances", ignored_appliances)

    @property
    def renaming_appliances(self):
        # Appliances currently being renamed; empty set when the key is unset.
        return self.get("renaming_appliances") or set([])


redis = RedisWrapper(redis_client)
sprout_path = project_path.join("sprout")
file_part = caseid[:-needle - 1].replace('.', os.sep) else: file_part = caseid return "{}.py::{}{}".format(file_part, attribute_part, parameter_part) def _clean(mapping): mapping.pop('', '') try: return { 'requirement': int(mapping['Requirement']), 'tier': int(mapping['TestTier']), 'id': generate_nodeid(mapping), 'type': mapping['TestType'].lower(), } except (TypeError, ValueError): return None def parse(path): if not path.check(): return [] with path.open() as fp: return filter(None, map(_clean, csv.DictReader(fp))) if __name__ == '__main__': mapping_file = project_path.join(py.std.sys.argv[1]) print(yaml.dump(parse(mapping_file), default_flow_style=False))
return self.rc < other elif isinstance(other, str): return self.output < other else: raise ValueError('You can only compare SSHResult with str or int') @property def success(self): return self.rc == 0 @property def failed(self): return self.rc != 0 _ssh_key_file = project_path.join('.generated_ssh_key') _ssh_pubkey_file = project_path.join('.generated_ssh_key.pub') _client_session = list() class SSHClient(paramiko.SSHClient): """paramiko.SSHClient wrapper Allows copying/overriding and use as a context manager Constructor kwargs are handed directly to paramiko.SSHClient.connect() Args: container: If specified, then it is assumed that the VM hosts a container of CFME. The param then contains the name of the container. project: openshift's project which holds CFME pods
    @contextmanager
    def appliances_ignored_when_renaming(self, *appliances):
        """Context manager: mark *appliances* as being renamed for its duration.

        On entry the names are merged into the shared ``renaming_appliances``
        set in redis; on exit they are removed again.
        """
        with self.atomic() as client:
            ignored_appliances = client._get("renaming_appliances")
            if ignored_appliances is None:
                # Key not present yet -- start from an empty set.
                ignored_appliances = set([])
            for appliance in appliances:
                ignored_appliances.add(appliance)
            client._set("renaming_appliances", ignored_appliances)
        yield
        with self.atomic() as client:
            ignored_appliances = client._get("renaming_appliances")
            if ignored_appliances is None:
                ignored_appliances = set([])
            for appliance in appliances:
                try:
                    ignored_appliances.remove(appliance)
                except KeyError:
                    # Something wrong happened, ignore
                    pass
            client._set("renaming_appliances", ignored_appliances)

    @property
    def renaming_appliances(self):
        # Appliances currently being renamed; empty set when the key is unset.
        return self.get("renaming_appliances") or set([])


redis = RedisWrapper(redis_client)
sprout_path = project_path.join("sprout")
import json
import os
import os.path
from datetime import datetime

from artifactor.plugins.post_result import test_report
from cfme.utils import read_env
from cfme.utils.path import project_path
from cfme.utils.trackerbot import post_jenkins_result

# Jenkins-provided build identification.
job_name = os.environ['JOB_NAME']
number = int(os.environ['BUILD_NUMBER'])
date = str(datetime.now())

# reduce returns to bools for easy logic
runner_src = read_env(project_path.join('.jenkins_runner_result'))
runner_return = runner_src.get('RUNNER_RETURN', '1') == '0'
test_return = runner_src.get('TEST_RETURN', '1') == '0'

# 'stream' environ is set by jenkins for all stream test jobs
# but not in the template tester
if job_name not in ('template-tester', 'template-tester-openstack',
                    'template-tester-rhevm', 'template-tester-virtualcenter'):
    # try to pull out the appliance template name
    template_src = read_env(project_path.join('.appliance_template'))
    template = template_src.get('appliance_template', 'Unknown')
    stream = os.environ['stream']
else:
    # Template-tester jobs carry both stream and template in .template_tester.
    tester_src = read_env(project_path.join('.template_tester'))
    stream = tester_src['stream']
    template = tester_src['appliance_template']