Exemplo n.º 1
0
 def test_os_in_package_versions(self):
     """When the os entry is already cached, the mapping is returned as-is."""
     result = util.get_package_versions(
         "sha1", "ubuntu", "basic", package_versions=self.pv)
     assert result == self.pv
Exemplo n.º 2
0
 def test_os_not_in_package_versions(self, m_package_versions_for_hash):
     """Versions for an uncached os_type are fetched and merged in."""
     m_package_versions_for_hash.return_value = "1.1"
     result = util.get_package_versions("sha1",
                                        "rhel",
                                        "basic",
                                        package_versions=self.pv)
     expected = deepcopy(self.pv)
     expected['sha1'].update({'rhel': {'basic': "1.1"}})
     assert expected == result
Exemplo n.º 3
0
 def test_package_versions_not_found(self, m_package_versions_for_hash):
     """If gitbuilder returns a non-200 status (None), the mapping is unchanged."""
     m_package_versions_for_hash.return_value = None
     result = util.get_package_versions(
         "sha1", "rhel", "basic", package_versions=self.pv)
     assert self.pv == result
Exemplo n.º 4
0
 def test_no_package_versions_kwarg(self, m_package_versions_for_hash):
     """Omitting the package_versions kwarg still yields the expected mapping."""
     m_package_versions_for_hash.return_value = "1.0"
     result = util.get_package_versions("sha1", "ubuntu", "basic")
     expected = deepcopy(self.pv)
     assert expected == result
Exemplo n.º 5
0
 def test_no_package_versions_kwarg(self, m_package_versions_for_hash):
     """get_package_versions works when package_versions is not passed."""
     m_package_versions_for_hash.return_value = "1.0"
     result = util.get_package_versions("sha1",
                                        "ubuntu",
                                        "basic")
     assert result == deepcopy(self.pv)
Exemplo n.º 6
0
 def test_os_not_in_package_versions(self, m_package_versions_for_hash):
     """Fetching versions for a new os_type extends the cached mapping."""
     m_package_versions_for_hash.return_value = "1.1"
     result = util.get_package_versions(
         "sha1", "rhel", "basic", package_versions=self.pv)
     expected = deepcopy(self.pv)
     expected['sha1'].update({'rhel': {'basic': '1.1'}})
     assert result == expected
Exemplo n.º 7
0
 def test_os_not_in_package_versions(self, m_package_versions_for_hash):
     """An uncached os_type/os_version pair is fetched and merged in."""
     m_package_versions_for_hash.return_value = "1.1"
     result = util.get_package_versions(
         "sha1", "rhel", "7.0", "basic", package_versions=self.pv)
     expected = deepcopy(self.pv)
     expected['sha1'].update(dict(rhel={'7.0': dict(basic='1.1')}))
     assert expected == result
Exemplo n.º 8
0
 def test_package_versions_not_found(self, m_package_versions_for_hash):
     """A None result from gitbuilder (non-200 status) leaves the input as-is."""
     m_package_versions_for_hash.return_value = None
     result = util.get_package_versions("sha1",
                                        "rhel",
                                        "basic",
                                        package_versions=self.pv)
     assert result == self.pv
Exemplo n.º 9
0
 def test_os_not_in_package_versions(self, m_package_versions_for_hash):
     """A fetched version for a new os_type/os_version entry is merged in."""
     m_package_versions_for_hash.return_value = "1.1"
     result = util.get_package_versions("sha1",
                                        "rhel",
                                        "7.0",
                                        "basic",
                                        package_versions=self.pv)
     expected = deepcopy(self.pv)
     expected['sha1'].update({'rhel': {'7.0': {'basic': '1.1'}}})
     assert expected == result
Exemplo n.º 10
0
 def test_os_in_package_versions(self):
     """A fully cached sha1/os_type/os_version/flavor lookup is a no-op."""
     result = util.get_package_versions(
         "sha1", "ubuntu", "14.04", "basic", package_versions=self.pv)
     assert self.pv == result
Exemplo n.º 11
0
    def collect_jobs(self, arch, configs, newest=False, limit=0):
        """Build the list of jobs to schedule for the given architecture.

        :param arch:    architecture being scheduled; facets whose
                        ``exclude_arch`` matches it are skipped
        :param configs: iterable of ``(description, fragment_paths)`` pairs
        :param newest:  if True, return immediately on the first job with
                        missing packages so the caller can backtrack to a
                        newer sha1
        :param limit:   stop collecting after this many jobs (0 = no limit)
        :returns: tuple ``(jobs_missing_packages, jobs_to_schedule)``;
                  the second element is None when ``newest`` caused an
                  early return
        """
        jobs_to_schedule = []
        jobs_missing_packages = []
        for description, fragment_paths in configs:
            if limit > 0 and len(jobs_to_schedule) >= limit:
                log.info(
                    'Stopped after {limit} jobs due to --limit={limit}'.format(
                        limit=limit))
                break

            # Read each fragment inside a context manager so the file
            # handles are closed promptly; the previous
            # `open(a, 'r').read()` comprehension left them open until GC.
            fragments = []
            for path in fragment_paths:
                with open(path, 'r') as f:
                    fragments.append(f.read())
            raw_yaml = '\n'.join(fragments)

            parsed_yaml = yaml.safe_load(raw_yaml)
            # Fragment values take precedence over the base config.
            os_type = parsed_yaml.get('os_type') or self.base_config.os_type
            os_version = parsed_yaml.get(
                'os_version') or self.base_config.os_version
            exclude_arch = parsed_yaml.get('exclude_arch')
            exclude_os_type = parsed_yaml.get('exclude_os_type')

            if exclude_arch and exclude_arch == arch:
                log.info('Skipping due to excluded_arch: %s facets %s',
                         exclude_arch, description)
                continue
            if exclude_os_type and exclude_os_type == os_type:
                log.info('Skipping due to excluded_os_type: %s facets %s',
                         exclude_os_type, description)
                continue

            arg = copy.deepcopy(self.base_args)
            arg.extend([
                '--num',
                str(self.args.num),
                '--description',
                description,
                '--',
            ])
            arg.extend(self.base_yaml_paths)
            arg.extend(fragment_paths)

            job = dict(yaml=parsed_yaml,
                       desc=description,
                       sha1=self.base_config.sha1,
                       args=arg)

            sha1 = self.base_config.sha1
            if parsed_yaml.get('verify_ceph_hash',
                               config.suite_verify_ceph_hash):
                full_job_config = copy.deepcopy(self.base_config.to_dict())
                deep_merge(full_job_config, parsed_yaml)
                flavor = util.get_install_task_flavor(full_job_config)
                # Get package versions for this sha1, os_type and flavor. If
                # we've already retrieved them in a previous loop, they'll be
                # present in package_versions and gitbuilder will not be asked
                # again for them.
                try:
                    self.package_versions = util.get_package_versions(
                        sha1, os_type, os_version, flavor,
                        self.package_versions)
                except VersionNotFoundError:
                    pass
                if not util.has_packages_for_distro(sha1, os_type, os_version,
                                                    flavor,
                                                    self.package_versions):
                    m = "Packages for os_type '{os}', flavor {flavor} and " + \
                        "ceph hash '{ver}' not found"
                    log.error(m.format(os=os_type, flavor=flavor, ver=sha1))
                    jobs_missing_packages.append(job)
                    # optimization: one missing package causes backtrack in newest mode;
                    # no point in continuing the search
                    if newest:
                        return jobs_missing_packages, None

            jobs_to_schedule.append(job)
        return jobs_missing_packages, jobs_to_schedule
Exemplo n.º 12
0
    def collect_jobs(self, arch, configs, newest=False):
        """Build the list of jobs to schedule for the given architecture.

        Applies ``--limit``, ``--filter-in`` and ``--filter-out`` from
        ``self.args`` while walking the facet configs.

        :param arch:    architecture being scheduled; facets whose
                        ``exclude_arch`` matches it are skipped
        :param configs: iterable of ``(description, fragment_paths)`` pairs
        :param newest:  if True, return immediately on the first job with
                        missing packages so the caller can backtrack to a
                        newer sha1
        :returns: tuple ``(jobs_missing_packages, jobs_to_schedule)``;
                  the second element is None when ``newest`` caused an
                  early return
        """
        jobs_to_schedule = []
        jobs_missing_packages = []
        for description, fragment_paths in configs:
            base_frag_paths = [
                util.strip_fragment_path(x) for x in fragment_paths
            ]
            limit = self.args.limit
            if limit > 0 and len(jobs_to_schedule) >= limit:
                log.info(
                    'Stopped after {limit} jobs due to --limit={limit}'.format(
                        limit=limit))
                break
            # Break apart the filter parameter (one string) into comma
            # separated components to be used in searches.
            filter_in = self.args.filter_in
            if filter_in:
                if not any([x in description for x in filter_in]):
                    for filt_samp in filter_in:
                        if any(x.find(filt_samp) >= 0 for x in base_frag_paths):
                            break
                    else:
                        continue
            filter_out = self.args.filter_out
            if filter_out:
                if any([x in description for x in filter_out]):
                    continue
                is_collected = True
                for filt_samp in filter_out:
                    if any(filt_samp in x for x in base_frag_paths):
                        is_collected = False
                        break
                if not is_collected:
                    continue

            # BUG FIX: `file()` is a Python 2-only builtin (NameError on
            # Python 3); use open() in a context manager so handles are
            # also closed promptly instead of leaking until GC.
            fragments = []
            for path in fragment_paths:
                with open(path, 'r') as f:
                    fragments.append(f.read())
            raw_yaml = '\n'.join(fragments)

            # safe_load: plain yaml.load without a Loader is deprecated and
            # can execute arbitrary constructors on untrusted input.
            parsed_yaml = yaml.safe_load(raw_yaml)
            # Fragment values take precedence over the base config.
            os_type = parsed_yaml.get('os_type') or self.base_config.os_type
            os_version = parsed_yaml.get('os_version') or self.base_config.os_version
            exclude_arch = parsed_yaml.get('exclude_arch')
            exclude_os_type = parsed_yaml.get('exclude_os_type')

            if exclude_arch and exclude_arch == arch:
                log.info('Skipping due to excluded_arch: %s facets %s',
                         exclude_arch, description)
                continue
            if exclude_os_type and exclude_os_type == os_type:
                log.info('Skipping due to excluded_os_type: %s facets %s',
                         exclude_os_type, description)
                continue

            arg = copy.deepcopy(self.base_args)
            arg.extend([
                '--description', description,
                '--',
            ])
            arg.extend(self.base_yaml_paths)
            arg.extend(fragment_paths)

            job = dict(
                yaml=parsed_yaml,
                desc=description,
                sha1=self.base_config.sha1,
                args=arg
            )

            sha1 = self.base_config.sha1
            if config.suite_verify_ceph_hash:
                full_job_config = copy.deepcopy(self.base_config.to_dict())
                deep_merge(full_job_config, parsed_yaml)
                flavor = util.get_install_task_flavor(full_job_config)
                # Get package versions for this sha1, os_type and flavor. If
                # we've already retrieved them in a previous loop, they'll be
                # present in package_versions and gitbuilder will not be asked
                # again for them.
                try:
                    self.package_versions = util.get_package_versions(
                        sha1,
                        os_type,
                        os_version,
                        flavor,
                        self.package_versions
                    )
                except VersionNotFoundError:
                    pass
                if not util.has_packages_for_distro(
                    sha1, os_type, os_version, flavor, self.package_versions
                ):
                    m = "Packages for os_type '{os}', flavor {flavor} and " + \
                        "ceph hash '{ver}' not found"
                    log.error(m.format(os=os_type, flavor=flavor, ver=sha1))
                    jobs_missing_packages.append(job)
                    # optimization: one missing package causes backtrack in newest mode;
                    # no point in continuing the search
                    if newest:
                        return jobs_missing_packages, None

            jobs_to_schedule.append(job)
        return jobs_missing_packages, jobs_to_schedule