def output_summary(path, limit=0, seed=None, subset=None, show_desc=True,
                   show_frag=False, show_matrix=False,
                   filter_in=None, filter_out=None, filter_all=None,
                   filter_fragments=True):
    """
    Prints the number of all facets of a given suite for inspection,
    taking into account options such as --subset, --filter, --filter-out
    and --filter-all. Optionally dumps the matrix object and the YAML
    files used to generate the combinations.
    """
    random.seed(seed)
    mat, first, matlimit = _get_matrix(path, subset)
    configs = generate_combinations(path, mat, first, matlimit)
    count = 0
    suite = os.path.basename(path)
    config_list = util.filter_configs(configs,
                                      suite_name=suite,
                                      filter_in=filter_in,
                                      filter_out=filter_out,
                                      filter_all=filter_all,
                                      filter_fragments=filter_fragments)
    for c in config_list:
        if limit and count >= limit:
            break
        count += 1
        if show_desc or show_frag:
            print("{}".format(c[0]))
            if show_frag:
                # Use a distinct loop variable so the 'path' argument is
                # not shadowed; it is printed again in the summary below.
                for frag_path in c[1]:
                    print("    {}".format(
                        util.strip_fragment_path(frag_path)))
    if show_matrix:
        print(mat.tostr(1))
    print("# {}/{} {}".format(count, len(configs), path))
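For illustration, a minimal sketch of how output_summary might be invoked from a dry-run code path. The suite path and filter values are made-up examples, and the sketch assumes subset is the (index, outof) pair that _get_matrix expects; the function prints its summary rather than returning it:

    # Hypothetical dry-run invocation; path and filters are example values.
    output_summary(
        '/home/user/ceph-qa-suite/suites/rados',  # hypothetical suite path
        limit=10,                 # stop after 10 combinations
        seed=42,                  # make subset selection reproducible
        subset=(0, 4),            # assumed (index, outof) slice pair
        show_frag=True,           # also print each fragment path
        filter_in=['thrash'],     # keep only matching descriptions/fragments
        filter_out=['valgrind'],  # drop any combination matching this
    )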
def collect_jobs(self, arch, configs, newest=False):
    jobs_to_schedule = []
    jobs_missing_packages = []
    for description, fragment_paths in configs:
        base_frag_paths = [
            util.strip_fragment_path(x) for x in fragment_paths
        ]
        limit = self.args.limit
        if limit > 0 and len(jobs_to_schedule) >= limit:
            log.info(
                'Stopped after {limit} jobs due to --limit={limit}'.format(
                    limit=limit))
            break
        # The filter arguments arrive as lists of strings. A job is kept
        # by --filter if any string occurs in its description or in one
        # of its fragment paths, and dropped by --filter-out on any such
        # match.
        filter_in = self.args.filter_in
        if filter_in:
            if not any(x in description for x in filter_in):
                for filt_samp in filter_in:
                    if any(filt_samp in x for x in base_frag_paths):
                        break
                else:
                    continue
        filter_out = self.args.filter_out
        if filter_out:
            if any(x in description for x in filter_out):
                continue
            is_collected = True
            for filt_samp in filter_out:
                if any(filt_samp in x for x in base_frag_paths):
                    is_collected = False
                    break
            if not is_collected:
                continue

        raw_yaml = '\n'.join(open(a).read() for a in fragment_paths)
        parsed_yaml = yaml.safe_load(raw_yaml)

        os_type = parsed_yaml.get('os_type') or self.base_config.os_type
        os_version = (parsed_yaml.get('os_version') or
                      self.base_config.os_version)
        exclude_arch = parsed_yaml.get('exclude_arch')
        exclude_os_type = parsed_yaml.get('exclude_os_type')

        if exclude_arch and exclude_arch == arch:
            log.info('Skipping due to exclude_arch: %s facets %s',
                     exclude_arch, description)
            continue
        if exclude_os_type and exclude_os_type == os_type:
            log.info('Skipping due to exclude_os_type: %s facets %s',
                     exclude_os_type, description)
            continue

        arg = copy.deepcopy(self.base_args)
        arg.extend([
            '--description', description,
            '--',
        ])
        arg.extend(self.base_yaml_paths)
        arg.extend(fragment_paths)

        job = dict(
            yaml=parsed_yaml,
            desc=description,
            sha1=self.base_config.sha1,
            args=arg,
        )

        sha1 = self.base_config.sha1
        if config.suite_verify_ceph_hash:
            full_job_config = copy.deepcopy(self.base_config.to_dict())
            deep_merge(full_job_config, parsed_yaml)
            flavor = util.get_install_task_flavor(full_job_config)
            # Get package versions for this sha1, os_type and flavor. If
            # we've already retrieved them in a previous loop iteration,
            # they'll be present in package_versions and gitbuilder will
            # not be asked again for them.
            try:
                self.package_versions = util.get_package_versions(
                    sha1,
                    os_type,
                    os_version,
                    flavor,
                    self.package_versions,
                )
            except VersionNotFoundError:
                pass
            if not util.has_packages_for_distro(
                sha1, os_type, os_version, flavor, self.package_versions,
            ):
                m = "Packages for os_type '{os}', flavor {flavor} and " \
                    "ceph hash '{ver}' not found"
                log.error(m.format(os=os_type, flavor=flavor, ver=sha1))
                jobs_missing_packages.append(job)
                # Optimization: one missing package causes a backtrack in
                # newest mode, so there is no point in continuing the
                # search.
                if newest:
                    return jobs_missing_packages, None

        jobs_to_schedule.append(job)
    return jobs_missing_packages, jobs_to_schedule
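The keep/drop decision implemented by the two filter blocks above can be condensed into a small standalone sketch. The helper name matches_filters is ours, not part of this module, and the description and fragment values below are made-up examples:

    def matches_filters(description, base_frag_paths,
                        filter_in=None, filter_out=None):
        # Illustrative restatement of the selection rule in collect_jobs:
        # --filter keeps a job if any string occurs in the description or
        # in a fragment path; --filter-out drops it on any such match.
        if filter_in:
            in_desc = any(f in description for f in filter_in)
            in_frags = any(f in p
                           for f in filter_in for p in base_frag_paths)
            if not (in_desc or in_frags):
                return False
        if filter_out:
            if any(f in description for f in filter_out):
                return False
            if any(f in p for f in filter_out for p in base_frag_paths):
                return False
        return True

    # Example: kept by --filter thrash, then dropped by --filter-out valgrind.
    desc = 'rados/thrash/{clusters/fixed-2.yaml workloads/valgrind.yaml}'
    frags = ['clusters/fixed-2.yaml', 'workloads/valgrind.yaml']
    assert matches_filters(desc, frags, filter_in=['thrash'])
    assert not matches_filters(desc, frags, filter_in=['thrash'],
                               filter_out=['valgrind'])

Note that --filter-out is applied after --filter, so a job that matches both is dropped.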