def __init__(self, args):
    """
    :type args: TestConfig
    """
    self.args = args
    self.integration_all_target = get_integration_all_target(self.args)

    self.integration_targets = list(walk_integration_targets())
    self.module_targets = list(walk_module_targets())
    self.compile_targets = list(walk_compile_targets())
    self.units_targets = list(walk_units_targets())
    self.sanity_targets = list(walk_sanity_targets())
    self.powershell_targets = [t for t in self.sanity_targets if os.path.splitext(t.path)[1] == '.ps1']

    self.units_modules = set(t.module for t in self.units_targets if t.module)
    self.units_paths = set(a for t in self.units_targets for a in t.aliases)
    self.sanity_paths = set(t.path for t in self.sanity_targets)

    self.module_names_by_path = dict((t.path, t.module) for t in self.module_targets)
    self.integration_targets_by_name = dict((t.name, t) for t in self.integration_targets)
    self.integration_targets_by_alias = dict((a, t) for t in self.integration_targets for a in t.aliases)

    self.posix_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                            if 'posix/' in t.aliases for m in t.modules)
    self.windows_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                              if 'windows/' in t.aliases for m in t.modules)
    self.network_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                              if 'network/' in t.aliases for m in t.modules)

    self.prefixes = load_integration_prefixes()
    self.integration_dependencies = analyze_integration_target_dependencies(self.integration_targets)

    self.python_module_utils_imports = {}  # populated on first use to reduce overhead when not needed
    self.powershell_module_utils_imports = {}  # populated on first use to reduce overhead when not needed
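
# A minimal, self-contained sketch (not part of ansible-test) of the lookup-table
# pattern built above, using hypothetical stand-in targets; the real objects come
# from walk_integration_targets() and carry many more attributes.
from collections import namedtuple

Target = namedtuple('Target', ['name', 'aliases', 'modules'])

example_targets = [
    Target('ping', ['posix/', 'posix/ci/', 'ping'], ['ping']),
    Target('win_ping', ['windows/', 'windows/ci/', 'win_ping'], ['win_ping']),
]

# Map each module to the integration target that exercises it on POSIX hosts,
# mirroring posix_integration_by_module above.
posix_integration_by_module = dict((m, t.name) for t in example_targets
                                   if 'posix/' in t.aliases for m in t.modules)

assert posix_integration_by_module == {'ping': 'ping'}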
def __init__(self):
    self.integration_targets = list(walk_integration_targets())
    self.module_targets = list(walk_module_targets())
    self.compile_targets = list(walk_compile_targets())
    self.units_targets = list(walk_units_targets())
    self.sanity_targets = list(walk_sanity_targets())

    self.compile_paths = set(t.path for t in self.compile_targets)
    self.units_modules = set(t.module for t in self.units_targets if t.module)
    self.units_paths = set(a for t in self.units_targets for a in t.aliases)
    self.sanity_paths = set(t.path for t in self.sanity_targets)

    self.module_names_by_path = dict((t.path, t.module) for t in self.module_targets)
    self.integration_targets_by_name = dict((t.name, t) for t in self.integration_targets)
    self.integration_targets_by_alias = dict((a, t) for t in self.integration_targets for a in t.aliases)

    self.posix_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                            if 'posix/' in t.aliases for m in t.modules)
    self.windows_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                              if 'windows/' in t.aliases for m in t.modules)
    self.network_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                              if 'network/' in t.aliases for m in t.modules)

    self.prefixes = load_integration_prefixes()

    self.python_module_utils_imports = {}  # populated on first use to reduce overhead when not needed
def check_changes(self, args, results):
    """
    :type args: SanityConfig
    :type results: dict[str, any]
    """
    integration_targets = list(walk_integration_targets())
    module_targets = list(walk_module_targets())

    integration_targets_by_name = dict((t.name, t) for t in integration_targets)
    module_names_by_path = dict((t.path, t.module) for t in module_targets)

    disabled_targets = []
    unstable_targets = []
    unsupported_targets = []

    for command in [command for command in args.metadata.change_description.focused_command_targets
                    if 'integration' in command]:
        for target in args.metadata.change_description.focused_command_targets[command]:
            if self.DISABLED in integration_targets_by_name[target].aliases:
                disabled_targets.append(target)
            elif self.UNSTABLE in integration_targets_by_name[target].aliases:
                unstable_targets.append(target)
            elif self.UNSUPPORTED in integration_targets_by_name[target].aliases:
                unsupported_targets.append(target)

    untested_modules = []

    for path in args.metadata.change_description.no_integration_paths:
        module = module_names_by_path.get(path)

        if module:
            untested_modules.append(module)

    comments = [
        self.format_comment(self.TEMPLATE_DISABLED, disabled_targets),
        self.format_comment(self.TEMPLATE_UNSTABLE, unstable_targets),
        self.format_comment(self.TEMPLATE_UNSUPPORTED, unsupported_targets),
        self.format_comment(self.TEMPLATE_UNTESTED, untested_modules),
    ]

    comments = [comment for comment in comments if comment]

    labels = dict(
        needs_tests=bool(untested_modules),
        disabled_tests=bool(disabled_targets),
        unstable_tests=bool(unstable_targets),
        unsupported_tests=bool(unsupported_targets),
    )

    results['comments'] += comments
    results['labels'].update(labels)
def __init__(self):
    self.integration_targets = list(walk_integration_targets())
    self.module_targets = list(walk_module_targets())
    self.compile_targets = list(walk_compile_targets())
    self.units_targets = list(walk_units_targets())
    self.sanity_targets = list(walk_sanity_targets())

    self.compile_paths = set(t.path for t in self.compile_targets)
    self.units_modules = set(t.module for t in self.units_targets if t.module)
    self.units_paths = set(a for t in self.units_targets for a in t.aliases)
    self.sanity_paths = set(t.path for t in self.sanity_targets)

    self.module_names_by_path = dict((t.path, t.module) for t in self.module_targets)
    self.integration_targets_by_name = dict((t.name, t) for t in self.integration_targets)
    self.integration_targets_by_alias = dict((a, t) for t in self.integration_targets for a in t.aliases)

    self.posix_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                            if 'posix/' in t.aliases for m in t.modules)
    self.windows_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                              if 'windows/' in t.aliases for m in t.modules)
    self.network_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                              if 'network/' in t.aliases for m in t.modules)

    self.prefixes = load_integration_prefixes()
def check_changes(self, args, results):
    """
    :type args: SanityConfig
    :type results: dict[str, any]
    """
    integration_targets = list(walk_integration_targets())
    module_targets = list(walk_module_targets())

    integration_targets_by_name = dict((t.name, t) for t in integration_targets)
    module_names_by_path = dict((t.path, t.module) for t in module_targets)

    disabled_targets = []
    unstable_targets = []
    unsupported_targets = []

    for command in [command for command in args.metadata.change_description.focused_command_targets
                    if 'integration' in command]:
        for target in args.metadata.change_description.focused_command_targets[command]:
            if self.DISABLED in integration_targets_by_name[target].aliases:
                disabled_targets.append(target)
            elif self.UNSTABLE in integration_targets_by_name[target].aliases:
                unstable_targets.append(target)
            elif self.UNSUPPORTED in integration_targets_by_name[target].aliases:
                unsupported_targets.append(target)

    untested_modules = []

    for path in args.metadata.change_description.no_integration_paths:
        module = module_names_by_path.get(path)

        if module:
            untested_modules.append(module)

    comments = [
        self.format_comment(self.TEMPLATE_DISABLED, disabled_targets),
        self.format_comment(self.TEMPLATE_UNSTABLE, unstable_targets),
        self.format_comment(self.TEMPLATE_UNSUPPORTED, unsupported_targets),
        self.format_comment(self.TEMPLATE_UNTESTED, untested_modules),
    ]

    comments = [comment for comment in comments if comment]

    labels = dict(
        needs_tests=bool(untested_modules),
        disabled_tests=bool(disabled_targets),
        unstable_tests=bool(unstable_targets),
        unsupported_tests=bool(unsupported_targets),
    )

    results['comments'] += comments
    results['labels'].update(labels)
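
# A plausible shape for the format_comment helper called above (hypothetical sketch,
# not the actual ansible-test implementation): it returns a falsy value when there are
# no items, which is why check_changes filters empty comments out of the list. The
# template substitution shown here is an assumption.
def format_comment(template, targets):
    """
    :type template: str
    :type targets: list[str]
    :rtype: str | None
    """
    if not targets:
        return None

    return template % dict(targets='\n'.join('* %s' % target for target in sorted(targets)))

# Example usage with a hypothetical template:
# print(format_comment('These tests are disabled:\n%(targets)s', ['foo', 'bar']))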
def __init__(self):
    self.integration_targets = list(walk_integration_targets())
    self.module_targets = list(walk_module_targets())
    self.compile_targets = list(walk_compile_targets())
    self.units_targets = list(walk_units_targets())
    self.sanity_targets = list(walk_sanity_targets())

    self.compile_paths = set(t.path for t in self.compile_targets)
    self.units_modules = set(t.module for t in self.units_targets if t.module)
    self.units_paths = set(a for t in self.units_targets for a in t.aliases)
    self.sanity_paths = set(t.path for t in self.sanity_targets)

    self.module_names_by_path = dict((t.path, t.module) for t in self.module_targets)
    self.integration_targets_by_name = dict((t.name, t) for t in self.integration_targets)

    self.posix_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                            if 'posix/' in t.aliases for m in t.modules)
    self.windows_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                              if 'windows/' in t.aliases for m in t.modules)
    self.network_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                              if 'network/' in t.aliases for m in t.modules)
def command_coverage_combine(args):
    """Patch paths in coverage files and merge into a single file.
    :type args: CoverageConfig
    :rtype: list[str]
    """
    coverage = initialize_coverage(args)

    modules = dict((t.module, t.path) for t in list(walk_module_targets()))

    coverage_files = [os.path.join(COVERAGE_DIR, f) for f in os.listdir(COVERAGE_DIR) if '=coverage.' in f]

    ansible_path = os.path.abspath('lib/ansible/') + '/'
    root_path = os.getcwd() + '/'

    counter = 0
    groups = {}

    if args.all or args.stub:
        sources = sorted(os.path.abspath(target.path) for target in walk_compile_targets())
    else:
        sources = []

    if args.stub:
        groups['=stub'] = dict((source, set()) for source in sources)

    for coverage_file in coverage_files:
        counter += 1
        display.info('[%4d/%4d] %s' % (counter, len(coverage_files), coverage_file), verbosity=2)

        original = coverage.CoverageData()

        group = get_coverage_group(args, coverage_file)

        if group is None:
            display.warning('Unexpected name for coverage file: %s' % coverage_file)
            continue

        if os.path.getsize(coverage_file) == 0:
            display.warning('Empty coverage file: %s' % coverage_file)
            continue

        try:
            original.read_file(coverage_file)
        except Exception as ex:  # pylint: disable=locally-disabled, broad-except
            display.error(str(ex))
            continue

        for filename in original.measured_files():
            arcs = set(original.arcs(filename) or [])

            if not arcs:
                # This is most likely due to using an unsupported version of coverage.
                display.warning('No arcs found for "%s" in coverage file: %s' % (filename, coverage_file))
                continue

            if '/ansible_modlib.zip/ansible/' in filename:
                new_name = re.sub('^.*/ansible_modlib.zip/ansible/', ansible_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif '/ansible_module_' in filename:
                module = re.sub('^.*/ansible_module_(?P<module>.*).py$', '\\g<module>', filename)

                if module not in modules:
                    display.warning('Skipping coverage of unknown module: %s' % module)
                    continue

                new_name = os.path.abspath(modules[module])
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif re.search('^(/.*?)?/root/ansible/', filename):
                new_name = re.sub('^(/.*?)?/root/ansible/', root_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name

            if group not in groups:
                groups[group] = {}

            arc_data = groups[group]

            if filename not in arc_data:
                arc_data[filename] = set()

            arc_data[filename].update(arcs)

    output_files = []

    for group in sorted(groups):
        arc_data = groups[group]

        updated = coverage.CoverageData()

        for filename in arc_data:
            if not os.path.isfile(filename):
                display.warning('Invalid coverage path: %s' % filename)
                continue

            updated.add_arcs({filename: list(arc_data[filename])})

        if args.all:
            updated.add_arcs(dict((source, []) for source in sources))

        if not args.explain:
            output_file = COVERAGE_FILE + group
            updated.write_file(output_file)
            output_files.append(output_file)

    return sorted(output_files)
def command_coverage_combine(args):
    """Patch paths in coverage files and merge into a single file.
    :type args: CoverageConfig
    :rtype: list[str]
    """
    coverage = initialize_coverage(args)

    modules = dict((t.module, t.path) for t in list(walk_module_targets()))

    coverage_files = [os.path.join(COVERAGE_DIR, f) for f in os.listdir(COVERAGE_DIR) if '=coverage.' in f]

    ansible_path = os.path.abspath('lib/ansible/') + '/'
    root_path = os.getcwd() + '/'

    counter = 0
    groups = {}

    if args.all or args.stub:
        sources = sorted(os.path.abspath(target.path) for target in walk_compile_targets())
    else:
        sources = []

    if args.stub:
        groups['=stub'] = dict((source, set()) for source in sources)

    for coverage_file in coverage_files:
        counter += 1
        display.info('[%4d/%4d] %s' % (counter, len(coverage_files), coverage_file), verbosity=2)

        original = coverage.CoverageData()

        group = get_coverage_group(args, coverage_file)

        if group is None:
            display.warning('Unexpected name for coverage file: %s' % coverage_file)
            continue

        if os.path.getsize(coverage_file) == 0:
            display.warning('Empty coverage file: %s' % coverage_file)
            continue

        try:
            original.read_file(coverage_file)
        except Exception as ex:  # pylint: disable=locally-disabled, broad-except
            display.error(str(ex))
            continue

        for filename in original.measured_files():
            arcs = set(original.arcs(filename) or [])

            if not arcs:
                # This is most likely due to using an unsupported version of coverage.
                display.warning('No arcs found for "%s" in coverage file: %s' % (filename, coverage_file))
                continue

            if '/ansible_modlib.zip/ansible/' in filename:
                new_name = re.sub('^.*/ansible_modlib.zip/ansible/', ansible_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif '/ansible_module_' in filename:
                module_name = re.sub('^.*/ansible_module_(?P<module>.*).py$', '\\g<module>', filename)

                if module_name not in modules:
                    display.warning('Skipping coverage of unknown module: %s' % module_name)
                    continue

                new_name = os.path.abspath(modules[module_name])
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif re.search('^(/.*?)?/root/ansible/', filename):
                new_name = re.sub('^(/.*?)?/root/ansible/', root_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name

            if group not in groups:
                groups[group] = {}

            arc_data = groups[group]

            if filename not in arc_data:
                arc_data[filename] = set()

            arc_data[filename].update(arcs)

    output_files = []

    for group in sorted(groups):
        arc_data = groups[group]

        updated = coverage.CoverageData()

        for filename in arc_data:
            if not os.path.isfile(filename):
                display.warning('Invalid coverage path: %s' % filename)
                continue

            updated.add_arcs({filename: list(arc_data[filename])})

        if args.all:
            updated.add_arcs(dict((source, []) for source in sources))

        if not args.explain:
            output_file = COVERAGE_FILE + group
            updated.write_file(output_file)
            output_files.append(output_file)

    return sorted(output_files)
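
# Standalone illustration of the Ansible 2.6-era module path rewrite used above;
# the remote temporary path is a hypothetical example.
import re

remote_path = '/home/user/.ansible/tmp/ansible_module_ping.py'
module_name = re.sub('^.*/ansible_module_(?P<module>.*).py$', '\\g<module>', remote_path)

assert module_name == 'ping'  # then looked up in `modules` to recover the controller-side path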
def command_coverage_combine(args):
    """Patch paths in coverage files and merge into a single file.
    :type args: CoverageConfig
    """
    coverage = initialize_coverage(args)

    modules = dict((t.module, t.path) for t in list(walk_module_targets()))

    coverage_files = [os.path.join(COVERAGE_DIR, f) for f in os.listdir(COVERAGE_DIR)
                      if f.startswith('coverage') and f != 'coverage']

    arc_data = {}

    ansible_path = os.path.abspath('lib/ansible/') + '/'
    root_path = os.getcwd() + '/'

    counter = 0

    for coverage_file in coverage_files:
        counter += 1
        display.info('[%4d/%4d] %s' % (counter, len(coverage_files), coverage_file), verbosity=2)

        original = coverage.CoverageData()

        if os.path.getsize(coverage_file) == 0:
            display.warning('Empty coverage file: %s' % coverage_file)
            continue

        try:
            original.read_file(coverage_file)
        except Exception as ex:  # pylint: disable=locally-disabled, broad-except
            display.error(str(ex))
            continue

        for filename in original.measured_files():
            arcs = original.arcs(filename)

            if '/ansible_modlib.zip/ansible/' in filename:
                new_name = re.sub('^.*/ansible_modlib.zip/ansible/', ansible_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif '/ansible_module_' in filename:
                module = re.sub('^.*/ansible_module_(?P<module>.*).py$', '\\g<module>', filename)
                new_name = os.path.abspath(modules[module])
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif filename.startswith('/root/ansible/'):
                new_name = re.sub('^/.*?/ansible/', root_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name

            if filename not in arc_data:
                arc_data[filename] = []

            arc_data[filename] += arcs

    updated = coverage.CoverageData()

    for filename in arc_data:
        if not os.path.isfile(filename):
            display.warning('Invalid coverage path: %s' % filename)
            continue

        updated.add_arcs({filename: arc_data[filename]})

    if not args.explain:
        updated.write_file(COVERAGE_FILE)
def command_coverage_combine(args):
    """Patch paths in coverage files and merge into a single file.
    :type args: CoverageConfig
    :rtype: list[str]
    """
    coverage = initialize_coverage(args)

    modules = dict((t.module, t.path) for t in list(walk_module_targets()) if t.path.endswith('.py'))

    coverage_files = [os.path.join(COVERAGE_DIR, f) for f in os.listdir(COVERAGE_DIR) if '=coverage.' in f]

    ansible_path = os.path.abspath('lib/ansible/') + '/'
    root_path = data_context().content.root + '/'

    counter = 0
    groups = {}

    if args.all or args.stub:
        sources = sorted(os.path.abspath(target.path) for target in walk_compile_targets())
    else:
        sources = []

    if args.stub:
        stub_group = []
        stub_groups = [stub_group]
        stub_line_limit = 500000
        stub_line_count = 0

        for source in sources:
            with open(source, 'r') as source_fd:
                source_line_count = len(source_fd.read().splitlines())

            stub_group.append(source)
            stub_line_count += source_line_count

            if stub_line_count > stub_line_limit:
                stub_line_count = 0
                stub_group = []
                stub_groups.append(stub_group)

        for stub_index, stub_group in enumerate(stub_groups):
            if not stub_group:
                continue

            groups['=stub-%02d' % (stub_index + 1)] = dict((source, set()) for source in stub_group)

    if data_context().content.collection:
        collection_search_re = re.compile(r'/%s/' % data_context().content.collection.directory)
        collection_sub_re = re.compile(r'^.*?/%s/' % data_context().content.collection.directory)
    else:
        collection_search_re = None
        collection_sub_re = None

    for coverage_file in coverage_files:
        counter += 1
        display.info('[%4d/%4d] %s' % (counter, len(coverage_files), coverage_file), verbosity=2)

        original = coverage.CoverageData()

        group = get_coverage_group(args, coverage_file)

        if group is None:
            display.warning('Unexpected name for coverage file: %s' % coverage_file)
            continue

        if os.path.getsize(coverage_file) == 0:
            display.warning('Empty coverage file: %s' % coverage_file)
            continue

        try:
            original.read_file(coverage_file)
        except Exception as ex:  # pylint: disable=locally-disabled, broad-except
            display.error(u'%s' % ex)
            continue

        for filename in original.measured_files():
            arcs = set(original.arcs(filename) or [])

            if not arcs:
                # This is most likely due to using an unsupported version of coverage.
                display.warning('No arcs found for "%s" in coverage file: %s' % (filename, coverage_file))
                continue

            if '/ansible_modlib.zip/ansible/' in filename:
                # Rewrite the module_utils path from the remote host to match the controller. Ansible 2.6 and earlier.
                new_name = re.sub('^.*/ansible_modlib.zip/ansible/', ansible_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif collection_search_re and collection_search_re.search(filename):
                new_name = os.path.abspath(collection_sub_re.sub('', filename))
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif re.search(r'/ansible_[^/]+_payload\.zip/ansible/', filename):
                # Rewrite the module_utils path from the remote host to match the controller. Ansible 2.7 and later.
                new_name = re.sub(r'^.*/ansible_[^/]+_payload\.zip/ansible/', ansible_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif '/ansible_module_' in filename:
                # Rewrite the module path from the remote host to match the controller. Ansible 2.6 and earlier.
                module_name = re.sub('^.*/ansible_module_(?P<module>.*).py$', '\\g<module>', filename)

                if module_name not in modules:
                    display.warning('Skipping coverage of unknown module: %s' % module_name)
                    continue

                new_name = os.path.abspath(modules[module_name])
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif re.search(r'/ansible_[^/]+_payload(_[^/]+|\.zip)/__main__\.py$', filename):
                # Rewrite the module path from the remote host to match the controller. Ansible 2.7 and later.
                # AnsiballZ versions using zipimporter will match the `.zip` portion of the regex.
                # AnsiballZ versions not using zipimporter will match the `_[^/]+` portion of the regex.
                module_name = re.sub(r'^.*/ansible_(?P<module>[^/]+)_payload(_[^/]+|\.zip)/__main__\.py$',
                                     '\\g<module>', filename).rstrip('_')

                if module_name not in modules:
                    display.warning('Skipping coverage of unknown module: %s' % module_name)
                    continue

                new_name = os.path.abspath(modules[module_name])
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif re.search('^(/.*?)?/root/ansible/', filename):
                # Rewrite the path of code running on a remote host or in a docker container as root.
                new_name = re.sub('^(/.*?)?/root/ansible/', root_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif '/.ansible/test/tmp/' in filename:
                # Rewrite the path of code running from an integration test temporary directory.
                new_name = re.sub(r'^.*/\.ansible/test/tmp/[^/]+/', root_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name

            if group not in groups:
                groups[group] = {}

            arc_data = groups[group]

            if filename not in arc_data:
                arc_data[filename] = set()

            arc_data[filename].update(arcs)

    output_files = []
    invalid_path_count = 0
    invalid_path_chars = 0

    for group in sorted(groups):
        arc_data = groups[group]

        updated = coverage.CoverageData()

        for filename in arc_data:
            if not os.path.isfile(filename):
                invalid_path_count += 1
                invalid_path_chars += len(filename)

                if args.verbosity > 1:
                    display.warning('Invalid coverage path: %s' % filename)

                continue

            updated.add_arcs({filename: list(arc_data[filename])})

        if args.all:
            updated.add_arcs(dict((source, []) for source in sources))

        if not args.explain:
            output_file = COVERAGE_FILE + group
            updated.write_file(output_file)
            output_files.append(output_file)

    if invalid_path_count > 0:
        display.warning('Ignored %d characters from %d invalid coverage path(s).' % (invalid_path_chars, invalid_path_count))

    return sorted(output_files)
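
# Standalone illustration of the AnsiballZ payload path rewrite used above (Ansible 2.7+);
# both remote temporary paths are hypothetical examples.
import re

payload_re = r'^.*/ansible_(?P<module>[^/]+)_payload(_[^/]+|\.zip)/__main__\.py$'

for remote_path in (
        '/tmp/ansible_ping_payload_abc123/__main__.py',  # AnsiballZ without zipimporter
        '/tmp/ansible_ping_payload.zip/__main__.py',     # AnsiballZ with zipimporter
):
    module_name = re.sub(payload_re, '\\g<module>', remote_path).rstrip('_')
    assert module_name == 'ping'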
def command_coverage_combine(args):
    """Patch paths in coverage files and merge into a single file.
    :type args: CoverageConfig
    """
    coverage = initialize_coverage(args)

    modules = dict((t.module, t.path) for t in list(walk_module_targets()))

    coverage_files = [os.path.join(COVERAGE_DIR, f) for f in os.listdir(COVERAGE_DIR)
                      if f.startswith('coverage') and f != 'coverage']

    arc_data = {}

    ansible_path = os.path.abspath('lib/ansible/') + '/'
    root_path = os.getcwd() + '/'

    for coverage_file in coverage_files:
        original = coverage.CoverageData()

        if os.path.getsize(coverage_file) == 0:
            display.warning('Empty coverage file: %s' % coverage_file)
            continue

        try:
            original.read_file(coverage_file)
        except Exception as ex:  # pylint: disable=locally-disabled, broad-except
            display.error(str(ex))
            continue

        for filename in original.measured_files():
            arcs = original.arcs(filename)

            if '/ansible_modlib.zip/ansible/' in filename:
                new_name = re.sub('^.*/ansible_modlib.zip/ansible/', ansible_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif '/ansible_module_' in filename:
                module = re.sub('^.*/ansible_module_(?P<module>.*).py$', '\\g<module>', filename)
                new_name = os.path.abspath(modules[module])
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif filename.startswith('/root/ansible/'):
                new_name = re.sub('^/.*?/ansible/', root_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name

            if filename not in arc_data:
                arc_data[filename] = []

            arc_data[filename] += arcs

    updated = coverage.CoverageData()

    for filename in arc_data:
        if not os.path.isfile(filename):
            display.warning('Invalid coverage path: %s' % filename)
            continue

        updated.add_arcs({filename: arc_data[filename]})

    if not args.explain:
        updated.write_file(COVERAGE_FILE)
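
# The CoverageData read_file()/write_file() calls used throughout these functions belong
# to the pre-5.0 coverage.py API. A minimal standalone sketch of the same merge pattern,
# assuming coverage < 5.0 is installed; the input paths are placeholders.
import os
import coverage

combined = coverage.CoverageData()

for path in ('/tmp/coverage.a', '/tmp/coverage.b'):  # placeholder input files
    if not os.path.exists(path):
        continue

    data = coverage.CoverageData()
    data.read_file(path)  # replaced by CoverageData.read() in coverage 5.0+

    for measured in data.measured_files():
        combined.add_arcs({measured: data.arcs(measured) or []})

combined.write_file('/tmp/coverage.combined')  # replaced by CoverageData.write() in coverage 5.0+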