def test_has_packages_no_package_versions(self, m_get_package_versions):
    """With the mocked version lookup returning self.pv, rhel has no packages."""
    m_get_package_versions.return_value = self.pv
    assert not util.has_packages_for_distro("sha1", "rhel", "basic")
def test_distro_has_packages(self):
    """A distro present in self.pv reports packages as available."""
    found = util.has_packages_for_distro(
        "sha1",
        "ubuntu",
        "basic",
        package_versions=self.pv,
    )
    assert found
def test_distro_does_not_have_packages(self):
    """A distro absent from self.pv reports packages as unavailable."""
    found = util.has_packages_for_distro(
        "sha1",
        "rhel",
        "basic",
        package_versions=self.pv,
    )
    assert not found
def collect_jobs(self, arch, configs, newest=False, limit=0):
    """Build the schedulable job list for one architecture.

    For each (description, fragment_paths) pair in configs: merge the
    YAML fragments, skip configs excluded for this arch or os_type, and
    assemble the scheduling argument vector.  When the merged config
    requests ceph-hash verification, look up package availability and
    record jobs whose packages are missing.

    :param arch:    architecture being scheduled; compared against each
                    config's ``exclude_arch``
    :param configs: iterable of (description, fragment_paths) tuples
    :param newest:  if True, return as soon as one job has missing
                    packages so the caller can backtrack to a newer sha1
    :param limit:   stop collecting after this many schedulable jobs
                    (0 means no limit)
    :returns: tuple (jobs_missing_packages, jobs_to_schedule);
              jobs_to_schedule is None when newest-mode backtracking
              triggered
    """
    jobs_to_schedule = []
    jobs_missing_packages = []
    for description, fragment_paths in configs:
        if limit > 0 and len(jobs_to_schedule) >= limit:
            log.info(
                'Stopped after {limit} jobs due to --limit={limit}'.format(
                    limit=limit))
            break
        # Fix: read each fragment inside a context manager; the original
        # list comprehension opened the files without ever closing them,
        # leaking one file handle per fragment.
        fragments = []
        for path in fragment_paths:
            with open(path, 'r') as f:
                fragments.append(f.read())
        raw_yaml = '\n'.join(fragments)
        parsed_yaml = yaml.safe_load(raw_yaml)
        os_type = parsed_yaml.get('os_type') or self.base_config.os_type
        os_version = parsed_yaml.get(
            'os_version') or self.base_config.os_version
        exclude_arch = parsed_yaml.get('exclude_arch')
        exclude_os_type = parsed_yaml.get('exclude_os_type')
        if exclude_arch and exclude_arch == arch:
            log.info('Skipping due to excluded_arch: %s facets %s',
                     exclude_arch, description)
            continue
        if exclude_os_type and exclude_os_type == os_type:
            log.info('Skipping due to excluded_os_type: %s facets %s',
                     exclude_os_type, description)
            continue
        arg = copy.deepcopy(self.base_args)
        arg.extend([
            '--num', str(self.args.num),
            '--description', description,
            '--',
        ])
        arg.extend(self.base_yaml_paths)
        arg.extend(fragment_paths)
        job = dict(yaml=parsed_yaml,
                   desc=description,
                   sha1=self.base_config.sha1,
                   args=arg)
        sha1 = self.base_config.sha1
        if parsed_yaml.get('verify_ceph_hash',
                           config.suite_verify_ceph_hash):
            full_job_config = copy.deepcopy(self.base_config.to_dict())
            deep_merge(full_job_config, parsed_yaml)
            flavor = util.get_install_task_flavor(full_job_config)
            # Get package versions for this sha1, os_type and flavor. If
            # we've already retrieved them in a previous loop, they'll be
            # present in package_versions and gitbuilder will not be asked
            # again for them.
            try:
                self.package_versions = util.get_package_versions(
                    sha1,
                    os_type,
                    os_version,
                    flavor,
                    self.package_versions)
            except VersionNotFoundError:
                pass
            if not util.has_packages_for_distro(sha1, os_type, os_version,
                                                flavor,
                                                self.package_versions):
                m = "Packages for os_type '{os}', flavor {flavor} and " + \
                    "ceph hash '{ver}' not found"
                log.error(m.format(os=os_type, flavor=flavor, ver=sha1))
                jobs_missing_packages.append(job)
                # optimization: one missing package causes backtrack in
                # newest mode; no point in continuing the search
                if newest:
                    return jobs_missing_packages, None
        jobs_to_schedule.append(job)
    return jobs_missing_packages, jobs_to_schedule
def collect_jobs(self, arch, configs, newest=False):
    """Build the schedulable job list for one architecture.

    For each (description, fragment_paths) pair in configs: apply the
    --limit, --filter and --filter-out options, merge the YAML
    fragments, skip configs excluded for this arch or os_type, and
    assemble the scheduling argument vector.  When hash verification is
    enabled, look up package availability and record jobs whose
    packages are missing.

    :param arch:    architecture being scheduled; compared against each
                    config's ``exclude_arch``
    :param configs: iterable of (description, fragment_paths) tuples
    :param newest:  if True, return as soon as one job has missing
                    packages so the caller can backtrack to a newer sha1
    :returns: tuple (jobs_missing_packages, jobs_to_schedule);
              jobs_to_schedule is None when newest-mode backtracking
              triggered
    """
    jobs_to_schedule = []
    jobs_missing_packages = []
    for description, fragment_paths in configs:
        base_frag_paths = [
            util.strip_fragment_path(x) for x in fragment_paths
        ]
        limit = self.args.limit
        if limit > 0 and len(jobs_to_schedule) >= limit:
            log.info(
                'Stopped after {limit} jobs due to --limit={limit}'.format(
                    limit=limit))
            break
        # Break apart the filter parameter (one string) into comma
        # separated components to be used in searches.
        filter_in = self.args.filter_in
        if filter_in:
            # Keep the config only if some filter term matches the
            # description or one of the stripped fragment paths.
            if not any([x in description for x in filter_in]):
                for filt_samp in filter_in:
                    if any(x.find(filt_samp) >= 0
                           for x in base_frag_paths):
                        break
                else:
                    continue
        filter_out = self.args.filter_out
        if filter_out:
            # Drop the config if any filter-out term matches the
            # description or one of the stripped fragment paths.
            if any([x in description for x in filter_out]):
                continue
            is_collected = True
            for filt_samp in filter_out:
                if any(filt_samp in x for x in base_frag_paths):
                    is_collected = False
                    break
            if not is_collected:
                continue
        # Fix: file() is a Python-2-only builtin (NameError on Python 3)
        # and the handles were never closed; use open() inside a context
        # manager instead.
        fragments = []
        for path in fragment_paths:
            with open(path, 'r') as f:
                fragments.append(f.read())
        raw_yaml = '\n'.join(fragments)
        # Fix: yaml.load() without a Loader is unsafe (it can construct
        # arbitrary Python objects); these fragments are plain YAML, so
        # safe_load suffices.
        parsed_yaml = yaml.safe_load(raw_yaml)
        os_type = parsed_yaml.get('os_type') or self.base_config.os_type
        os_version = parsed_yaml.get(
            'os_version') or self.base_config.os_version
        exclude_arch = parsed_yaml.get('exclude_arch')
        exclude_os_type = parsed_yaml.get('exclude_os_type')
        if exclude_arch and exclude_arch == arch:
            log.info('Skipping due to excluded_arch: %s facets %s',
                     exclude_arch, description)
            continue
        if exclude_os_type and exclude_os_type == os_type:
            log.info('Skipping due to excluded_os_type: %s facets %s',
                     exclude_os_type, description)
            continue
        arg = copy.deepcopy(self.base_args)
        arg.extend([
            '--description', description,
            '--',
        ])
        arg.extend(self.base_yaml_paths)
        arg.extend(fragment_paths)
        job = dict(
            yaml=parsed_yaml,
            desc=description,
            sha1=self.base_config.sha1,
            args=arg
        )
        sha1 = self.base_config.sha1
        if config.suite_verify_ceph_hash:
            full_job_config = copy.deepcopy(self.base_config.to_dict())
            deep_merge(full_job_config, parsed_yaml)
            flavor = util.get_install_task_flavor(full_job_config)
            # Get package versions for this sha1, os_type and flavor. If
            # we've already retrieved them in a previous loop, they'll be
            # present in package_versions and gitbuilder will not be asked
            # again for them.
            try:
                self.package_versions = util.get_package_versions(
                    sha1,
                    os_type,
                    os_version,
                    flavor,
                    self.package_versions
                )
            except VersionNotFoundError:
                pass
            if not util.has_packages_for_distro(
                sha1, os_type, os_version, flavor, self.package_versions
            ):
                m = "Packages for os_type '{os}', flavor {flavor} and " + \
                    "ceph hash '{ver}' not found"
                log.error(m.format(os=os_type, flavor=flavor, ver=sha1))
                jobs_missing_packages.append(job)
                # optimization: one missing package causes backtrack in
                # newest mode; no point in continuing the search
                if newest:
                    return jobs_missing_packages, None
        jobs_to_schedule.append(job)
    return jobs_missing_packages, jobs_to_schedule