Esempio n. 1
0
 def download_dependencies(self):
     """Download dependencies from `$deps_dir/download-requires`.

     Writes the filtered requirement list to that file, then (unless a
     prior successful download is detected) retries the pip download up
     to ``MAX_PIP_DOWNLOAD_ATTEMPTS`` times, moving fetched archives
     into ``self.download_dir``.

     :returns: tuple of (parsed requirements, downloaded file paths);
         both empty when there is nothing to download.
     :raises: the last ``ProcessExecutionError`` when every attempt fails.
     """
     # NOTE(aababilov): do not drop download_dir - it can be reused
     sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter)
     download_requires_filename = sh.joinpths(self.deps_dir,
                                              "download-requires")
     raw_pips_to_download = self.filter_download_requires()
     # Persist the requirement list (one per line) for later inspection.
     sh.write_file(download_requires_filename,
                   "\n".join(str(req) for req in raw_pips_to_download))
     if not raw_pips_to_download:
         return ([], [])
     downloaded_flag_file = sh.joinpths(self.deps_dir, "pip-downloaded")
     # NOTE(aababilov): user could have changed persona, so,
     # check that all requirements are downloaded
     if sh.isfile(downloaded_flag_file) and self._requirements_satisfied(
             raw_pips_to_download, self.download_dir):
         LOG.info("All python dependencies have been already downloaded")
     else:
         pip_dir = sh.joinpths(self.deps_dir, "pip")
         pip_download_dir = sh.joinpths(pip_dir, "download")
         pip_build_dir = sh.joinpths(pip_dir, "build")
         # NOTE(aababilov): do not clean the cache, it is always useful
         pip_cache_dir = sh.joinpths(self.deps_dir, "pip-cache")
         pip_failures = []
         for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
             # NOTE(aababilov): pip has issues with already downloaded files
             sh.deldir(pip_dir)
             sh.mkdir(pip_download_dir, recurse=True)
             header = "Downloading %s python dependencies (attempt %s)"
             header = header % (len(raw_pips_to_download), attempt)
             utils.log_iterable(sorted(raw_pips_to_download),
                                logger=LOG,
                                header=header)
             failed = False
             try:
                 self._try_download_dependencies(attempt, raw_pips_to_download,
                                                 pip_download_dir,
                                                 pip_cache_dir, pip_build_dir)
                 # A fully successful attempt wipes any earlier failures.
                 pip_failures = []
             except exc.ProcessExecutionError as e:
                 LOG.exception("Failed downloading python dependencies")
                 pip_failures.append(e)
                 failed = True
             if not failed:
                 break
         # Move whatever pip fetched into the shared download directory.
         for filename in sh.listdir(pip_download_dir, files_only=True):
             sh.move(filename, self.download_dir, force=True)
         sh.deldir(pip_dir)
         if pip_failures:
             # Every attempt failed; surface the most recent error.
             raise pip_failures[-1]
         # Touch the flag file so later runs can skip re-downloading.
         with open(downloaded_flag_file, "w"):
             pass
     pips_downloaded = [pip_helper.extract_requirement(p)
                        for p in raw_pips_to_download]
     self._examine_download_dir(pips_downloaded, self.download_dir)
     what_downloaded = sh.listdir(self.download_dir, files_only=True)
     return (pips_downloaded, what_downloaded)
Esempio n. 2
0
    def download_dependencies(self, clear_cache=False):
        """Download dependencies from `$deps_dir/download-requires`.

        Retries the pip download up to ``MAX_PIP_DOWNLOAD_ATTEMPTS`` times
        and moves the fetched archives into ``self.download_dir``.

        :param clear_cache: clear `$deps_dir/cache` dir (pip can work incorrectly
            when it has a cache)
        :returns: list of downloaded file paths (empty if nothing to fetch).
        :raises: the last ``ProcessExecutionError`` when every attempt fails.
        """
        # Start from a clean download directory on every run.
        sh.deldir(self.download_dir)
        sh.mkdir(self.download_dir, recurse=True)
        download_requires_filename = sh.joinpths(self.deps_dir,
                                                 "download-requires")
        raw_pips_to_download = self.filter_download_requires()
        # Parse the raw strings into requirement objects, skipping blanks.
        pips_to_download = [
            pkg_resources.Requirement.parse(str(p.strip()))
            for p in raw_pips_to_download if p.strip()
        ]
        # Persist the requirement list (one per line) for later inspection.
        sh.write_file(download_requires_filename,
                      "\n".join(str(req) for req in pips_to_download))
        if not pips_to_download:
            return []
        pip_dir = sh.joinpths(self.deps_dir, "pip")
        pip_download_dir = sh.joinpths(pip_dir, "download")
        pip_build_dir = sh.joinpths(pip_dir, "build")
        pip_cache_dir = sh.joinpths(pip_dir, "cache")
        if clear_cache:
            sh.deldir(pip_cache_dir)
        pip_failures = []
        how_many = len(pips_to_download)
        for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
            # NOTE(aababilov): pip has issues with already downloaded files
            sh.deldir(pip_download_dir)
            sh.mkdir(pip_download_dir, recurse=True)
            sh.deldir(pip_build_dir)
            utils.log_iterable(sorted(raw_pips_to_download),
                               logger=LOG,
                               header=("Downloading %s python dependencies "
                                       "(attempt %s)" % (how_many, attempt)))
            failed = False
            try:
                self._try_download_dependencies(attempt, pips_to_download,
                                                pip_download_dir,
                                                pip_cache_dir, pip_build_dir)
                # A fully successful attempt wipes any earlier failures.
                pip_failures = []
            except exc.ProcessExecutionError as e:
                LOG.exception("Failed downloading python dependencies")
                pip_failures.append(e)
                failed = True
            if not failed:
                break
        if pip_failures:
            # Every attempt failed; re-raise the most recent error.
            raise pip_failures[-1]
        # Move the fetched files into the shared download directory.
        for filename in sh.listdir(pip_download_dir, files_only=True):
            sh.move(filename, self.download_dir)
        return sh.listdir(self.download_dir, files_only=True)
Esempio n. 3
0
 def download_dependencies(self):
     """Download dependencies from `$deps_dir/download-requires`.

     Skips the download entirely when a prior run's flag file exists and
     all requirements are already satisfied; otherwise retries the pip
     download up to ``MAX_PIP_DOWNLOAD_ATTEMPTS`` times directly into
     ``self.download_dir``.

     :returns: tuple of (parsed requirements, downloaded file paths);
         both empty when there is nothing to download.
     :raises: the last ``ProcessExecutionError`` when every attempt fails.
     """
     # NOTE(aababilov): do not drop download_dir - it can be reused
     sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter)
     pips_to_download = self._filter_download_requires()
     # Persist the requirement list (one per line) for later inspection.
     sh.write_file(self.download_requires_filename,
                   "\n".join([str(req) for req in pips_to_download]))
     if not pips_to_download:
         return ([], [])
     # NOTE(aababilov): user could have changed persona, so,
     # check that all requirements are downloaded
     if (sh.isfile(self.downloaded_flag_file)
             and self._requirements_satisfied(pips_to_download,
                                              self.download_dir)):
         LOG.info("All python dependencies have been already downloaded")
     else:
         pip_failures = []
         for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
             # NOTE(aababilov): pip has issues with already downloaded files
             for filename in sh.listdir(self.download_dir, files_only=True):
                 sh.unlink(filename)
             header = "Downloading %s python dependencies (attempt %s)"
             header = header % (len(pips_to_download), attempt + 1)
             utils.log_iterable(sorted(pips_to_download),
                                logger=LOG,
                                header=header)
             failed = False
             try:
                 self._try_download_dependencies(attempt + 1,
                                                 pips_to_download,
                                                 self.download_dir)
                 # A fully successful attempt wipes any earlier failures.
                 pip_failures = []
             except exc.ProcessExecutionError as e:
                 LOG.exception("Failed downloading python dependencies")
                 pip_failures.append(e)
                 failed = True
             if not failed:
                 break
         if pip_failures:
             # Every attempt failed; surface the most recent error.
             raise pip_failures[-1]
         # NOTE(harlowja): Mark that we completed downloading successfully
         sh.touch_file(self.downloaded_flag_file,
                       die_if_there=False,
                       quiet=True,
                       tracewriter=self.tracewriter)
     pips_downloaded = [
         pip_helper.extract_requirement(p) for p in pips_to_download
     ]
     self._examine_download_dir(pips_downloaded, self.download_dir)
     return (pips_downloaded, sh.listdir(self.download_dir,
                                         files_only=True))
Esempio n. 4
0
    def download_dependencies(self, clear_cache=False):
        """Download dependencies from `$deps_dir/download-requires`.

        Retries the pip download up to ``MAX_PIP_DOWNLOAD_ATTEMPTS`` times
        and moves the fetched archives into ``self.download_dir``.

        :param clear_cache: clear `$deps_dir/cache` dir (pip can work incorrectly
            when it has a cache)
        :returns: list of downloaded file paths (empty if nothing to fetch).
        :raises: the last ``ProcessExecutionError`` when every attempt fails.
        """
        # Start from a clean download directory on every run.
        sh.deldir(self.download_dir)
        sh.mkdir(self.download_dir, recurse=True)
        download_requires_filename = sh.joinpths(self.deps_dir,
                                                 "download-requires")
        raw_pips_to_download = self.filter_download_requires()
        # Parse the raw strings into requirement objects, skipping blanks.
        pips_to_download = [pkg_resources.Requirement.parse(str(p.strip()))
                            for p in raw_pips_to_download if p.strip()]
        # Persist the requirement list (one per line) for later inspection.
        sh.write_file(download_requires_filename,
                      "\n".join(str(req) for req in pips_to_download))
        if not pips_to_download:
            return []
        pip_dir = sh.joinpths(self.deps_dir, "pip")
        pip_download_dir = sh.joinpths(pip_dir, "download")
        pip_build_dir = sh.joinpths(pip_dir, "build")
        pip_cache_dir = sh.joinpths(pip_dir, "cache")
        if clear_cache:
            sh.deldir(pip_cache_dir)
        pip_failures = []
        how_many = len(pips_to_download)
        for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
            # NOTE(aababilov): pip has issues with already downloaded files
            sh.deldir(pip_download_dir)
            sh.mkdir(pip_download_dir, recurse=True)
            sh.deldir(pip_build_dir)
            utils.log_iterable(sorted(raw_pips_to_download),
                               logger=LOG,
                               header=("Downloading %s python dependencies "
                                       "(attempt %s)" % (how_many, attempt)))
            failed = False
            try:
                self._try_download_dependencies(attempt, pips_to_download,
                                                pip_download_dir,
                                                pip_cache_dir, pip_build_dir)
                # A fully successful attempt wipes any earlier failures.
                pip_failures = []
            except exc.ProcessExecutionError as e:
                LOG.exception("Failed downloading python dependencies")
                pip_failures.append(e)
                failed = True
            if not failed:
                break
        if pip_failures:
            # Every attempt failed; re-raise the most recent error.
            raise pip_failures[-1]
        # Move the fetched files into the shared download directory.
        for filename in sh.listdir(pip_download_dir, files_only=True):
            sh.move(filename, self.download_dir)
        return sh.listdir(self.download_dir, files_only=True)
Esempio n. 5
0
 def _move_rpm_files(source_dir, target_dir):
     """Move every ``*.rpm`` file found under *source_dir* into *target_dir*.

     Does nothing when *source_dir* does not exist.
     """
     if not sh.isdir(source_dir):
         return
     candidates = sh.listdir(source_dir, recursive=True, files_only=True)
     for candidate in candidates:
         if candidate.lower().endswith(".rpm"):
             sh.move(candidate, target_dir, force=True)
Esempio n. 6
0
def download_dependencies(download_dir, pips_to_download, output_filename):
    """Run pip to download *pips_to_download* into *download_dir*.

    The full pip output is captured into *output_filename*. No-op when
    there is nothing to download.
    """
    if not pips_to_download:
        return
    # NOTE(aababilov): pip has issues with already downloaded files
    if not sh.isdir(download_dir):
        sh.mkdir(download_dir)
    else:
        for leftover in sh.listdir(download_dir, files_only=True):
            sh.unlink(leftover)
    # Clean out any previous paths that we don't want around.
    build_path = sh.joinpths(download_dir, ".build")
    if sh.isdir(build_path):
        sh.deldir(build_path)
    sh.mkdir(build_path)
    cmdline = [
        PIP_EXECUTABLE, '-v',
        'install', '-I', '-U',
        '--download', download_dir,
        '--build', build_path,
        # Don't download wheels since we lack the ability to create
        # rpms from them (until future when we will have it, if ever)...
        "--no-use-wheel",
    ]
    for pkg in pips_to_download:
        cmdline.extend(seg for seg in _split(pkg) if seg)
    sh.execute_save_output(cmdline, output_filename)
Esempio n. 7
0
File: venv.py Project: y2kbot/anvil
    def package_finish(self):
        """Finish packaging: fix deployment paths and tar up each virtualenv.

        For every buildable instance, optionally rewrites the component
        directory to the deployment directory inside the venv's ``bin``
        scripts, then archives the whole venv into a gzipped tarball.
        """
        super(VenvDependencyHandler, self).package_finish()
        for instance in self.instances:
            if not self._is_buildable(instance):
                continue
            venv_dir = sh.abspth(self._venv_directory_for(instance))

            # Replace paths with virtualenv deployment directory.
            if self.opts.get('venv_deploy_dir'):
                deploy_dir = sh.joinpths(self.opts.get('venv_deploy_dir'),
                                         instance.name)
                # Substitute the build-time component dir with the deploy
                # dir in file contents (re.subn returns (text, count)).
                replacer = functools.partial(
                    re.subn, re.escape(instance.get_option('component_dir')),
                    deploy_dir)
                bin_dir = sh.joinpths(venv_dir, 'bin')
                adjustments, files_replaced = self._replace_deployment_paths(bin_dir,
                                                                             replacer)
                if files_replaced:
                    LOG.info("Adjusted %s deployment path(s) in %s files",
                             adjustments, files_replaced)

            # Create a tarball containing the virtualenv.
            tar_filename = sh.joinpths(venv_dir, '%s-venv.tar.gz' % instance.name)
            LOG.info("Making tarball of %s built for %s at %s", venv_dir,
                     instance.name, tar_filename)
            with contextlib.closing(tarfile.open(tar_filename, "w:gz")) as tfh:
                for path in sh.listdir(venv_dir, recursive=True):
                    # Store entries relative to the venv root in the archive.
                    tfh.add(path, recursive=False, arcname=path[len(venv_dir):])
Esempio n. 8
0
def download_dependencies(download_dir, pips_to_download, output_filename):
    """Invoke pip to fetch *pips_to_download* into *download_dir*.

    All pip output is saved to *output_filename*. Returns immediately
    when there is nothing to download.
    """
    if not pips_to_download:
        return
    # NOTE(aababilov): pip has issues with already downloaded files
    if sh.isdir(download_dir):
        for stale in sh.listdir(download_dir, files_only=True):
            sh.unlink(stale)
    else:
        sh.mkdir(download_dir)
    # Clean out any previous paths that we don't want around.
    build_path = sh.joinpths(download_dir, ".build")
    if sh.isdir(build_path):
        sh.deldir(build_path)
    sh.mkdir(build_path)
    base_cmd = [
        PIP_EXECUTABLE,
        '-v',
        'install',
        '-I',
        '-U',
        '--download',
        download_dir,
        '--build',
        build_path,
        # Don't download wheels since we lack the ability to create
        # rpms from them (until future when we will have it, if ever)...
        "--no-use-wheel",
    ]
    requested = []
    for pkg in pips_to_download:
        requested.extend(seg for seg in _split(pkg) if seg)
    sh.execute_save_output(base_cmd + requested, output_filename)
Esempio n. 9
0
def download_dependencies(download_dir, pips_to_download, output_filename):
    """Run pip to download *pips_to_download* into *download_dir*.

    Keeps a persistent ``.cache`` directory between runs (used as pip's
    download cache) and saves the full pip output to *output_filename*.
    """
    if not pips_to_download:
        return
    # NOTE(aababilov): pip has issues with already downloaded files
    if sh.isdir(download_dir):
        for filename in sh.listdir(download_dir, files_only=True):
            sh.unlink(filename)
    else:
        sh.mkdir(download_dir)
    # Clean out any previous paths that we don't want around.
    build_path = sh.joinpths(download_dir, ".build")
    if sh.isdir(build_path):
        sh.deldir(build_path)
    sh.mkdir(build_path)
    # Ensure certain directories exist that we want to exist (but we don't
    # want to delete them run after run).
    cache_path = sh.joinpths(download_dir, ".cache")
    if not sh.isdir(cache_path):
        sh.mkdir(cache_path)
    cmdline = [
        PIP_EXECUTABLE, '-v',
        'install', '-I', '-U',
        '--download', download_dir,
        '--build', build_path,
        '--download-cache', cache_path,
    ]
    # Don't download wheels...
    #
    # See: https://github.com/pypa/pip/issues/1439
    # The --no-use-wheel flag only exists on pip >= 1.5, so gate on the
    # detected pip version.
    if dist_version.StrictVersion(PIP_VERSION) >= dist_version.StrictVersion('1.5'):
        cmdline.append("--no-use-wheel")
    cmdline.extend([str(p) for p in pips_to_download])
    sh.execute_save_output(cmdline, output_filename)
Esempio n. 10
0
 def _move_rpm_files(source_dir, target_dir):
     """Relocate all ``*.rpm`` files under *source_dir* into *target_dir*.

     A missing *source_dir* is silently ignored.
     """
     if not sh.isdir(source_dir):
         return
     all_files = sh.listdir(source_dir, recursive=True,
                            files_only=True)
     for path in all_files:
         if path.lower().endswith(".rpm"):
             sh.move(path, target_dir, force=True)
Esempio n. 11
0
    def build_binary(self):
        """Build binary RPMs from previously-built SRPMs for each repo.

        Installs any declared build requirements, then for every known
        repo renders a makefile from the ``binary.mk`` template and runs
        it (via ``_execute_make``) to rebuild each SRPM, finally moving
        the resulting RPMs into the repo directory and re-creating the
        repo metadata.
        """
        def is_src_rpm(path):
            # Accept only existing files whose name ends in '.src.rpm'.
            if not path:
                return False
            if not sh.isfile(path):
                return False
            if not path.lower().endswith('.src.rpm'):
                return False
            return True

        def list_src_rpms(path):
            # Sorted list of SRPM files in 'path' (empty if dir missing).
            path_files = []
            if sh.isdir(path):
                path_files = sh.listdir(path, filter_func=is_src_rpm)
            return sorted(path_files)

        build_requirements = self.requirements.get("build-requires")
        if build_requirements:
            utils.log_iterable(build_requirements,
                               header="Installing build requirements",
                               logger=LOG)
            self.helper.transaction(install_pkgs=build_requirements,
                                    tracewriter=self.tracewriter)

        for repo_name in self.REPOS:
            src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
            src_repo_files = list_src_rpms(src_repo_dir)
            if not src_repo_files:
                continue
            utils.log_iterable(src_repo_files,
                               header=('Building %s RPM packages from their'
                                      ' SRPMs for repo %s using %s jobs') %
                                      (len(src_repo_files), self.SRC_REPOS[repo_name], self._jobs),
                               logger=LOG)
            makefile_path = sh.joinpths(self.deps_dir, "binary-%s.mk" % repo_name)
            marks_dir = sh.joinpths(self.deps_dir, "marks-binary")
            if not sh.isdir(marks_dir):
                sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
            rpmbuild_flags = "--rebuild"
            if self.opts.get("usr_only", False):
                rpmbuild_flags += " --define 'usr_only 1'"
            params = {
                "SRC_REPO_DIR": src_repo_dir,
                "RPMBUILD_FLAGS": rpmbuild_flags,
                "LOGS_DIR": self.log_dir,
                'RPMTOP_DIR': self.rpmbuild_dir,
            }
            # Render the makefile template with this repo's parameters.
            (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"), "binary.mk")
            sh.write_file(makefile_path, utils.expand_template(content, params),
                          tracewriter=self.tracewriter)
            # rpmbuild_dir is created fresh for the build and removed after.
            with sh.remove_before_after(self.rpmbuild_dir):
                self._create_rpmbuild_subdirs()
                self._execute_make(makefile_path, marks_dir)
                repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
                # Collect RPMs from both per-package subdirs and the top dir.
                for d in sh.listdir(self.rpmbuild_dir, dirs_only=True):
                    self._move_rpm_files(sh.joinpths(d, "RPMS"), repo_dir)
                self._move_rpm_files(sh.joinpths(self.rpmbuild_dir, "RPMS"), repo_dir)
            self._create_repo(repo_name)
Esempio n. 12
0
 def _create_deps_repo(self):
     """Build the binary and source RPM repos from the rpmbuild output.

     Moves built RPMs/SRPMs into the repo directories, runs ``createrepo``
     on each, and writes the anvil repo file from its template.
     """
     # Collect built binary RPMs into the binary repo directory.
     for filename in sh.listdir(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                                recursive=True, files_only=True):
         sh.move(filename, self.deps_repo_dir, force=True)
     # Collect built source RPMs into the source repo directory.
     for filename in sh.listdir(sh.joinpths(self.rpmbuild_dir, "SRPMS"),
                                recursive=True, files_only=True):
         sh.move(filename, self.deps_src_repo_dir, force=True)
     for repo_dir in self.deps_repo_dir, self.deps_src_repo_dir:
         cmdline = ["createrepo", repo_dir]
         # Use lazy logger arguments instead of eager %-formatting
         # (consistent with the LOG.info call below).
         LOG.info("Creating repo at %s", repo_dir)
         sh.execute(cmdline)
     LOG.info("Writing %s to %s", self.REPO_FN, self.anvil_repo_filename)
     (_fn, content) = utils.load_template('packaging', self.REPO_FN)
     params = {"baseurl_bin": "file://%s" % self.deps_repo_dir,
               "baseurl_src": "file://%s" % self.deps_src_repo_dir}
     sh.write_file(self.anvil_repo_filename,
                   utils.expand_template(content, params),
                   tracewriter=self.tracewriter)
Esempio n. 13
0
 def _replace_deployment_paths(self, root_dir, replacer):
     """Apply *replacer* to every file under *root_dir*, rewriting matches.

     :param replacer: callable taking file contents and returning a
         ``(new_contents, replacement_count)`` pair.
     :returns: tuple of (files changed, total replacements made).
     """
     files_replaced = 0
     total_replacements = 0
     for filename in sh.listdir(root_dir, recursive=True, files_only=True):
         contents = sh.load_file(filename)
         new_contents, num_subs = replacer(contents)
         if num_subs:
             sh.write_file(filename, new_contents)
             files_replaced += 1
             total_replacements += num_subs
     return (files_replaced, total_replacements)
Esempio n. 14
0
File: venv.py Project: jzako/anvil
 def _replace_deployment_paths(self, root_dir, replacer):
     """Rewrite files under *root_dir* using *replacer*.

     :param replacer: callable mapping file contents to a
         ``(new_contents, replacement_count)`` pair.
     :returns: ``(files_replaced, total_replacements)`` tuple.
     """
     changed_files = 0
     changes_made = 0
     for target in sh.listdir(root_dir, recursive=True, files_only=True):
         updated, count = replacer(sh.load_file(target))
         if not count:
             continue
         sh.write_file(target, updated)
         changes_made += count
         changed_files += 1
     return (changed_files, changes_made)
Esempio n. 15
0
 def _examine_download_dir(self, pips_to_download, pip_download_dir):
     """Log any downloaded archives that were not explicitly requested.

     :param pips_to_download: requirement objects that were asked for.
     :param pip_download_dir: directory holding the downloaded archives.
     """
     # Set comprehension instead of set([listcomp]) - avoids building a
     # throwaway list just to construct the set.
     pip_names = {p.key for p in pips_to_download}
     what_downloaded = sh.listdir(pip_download_dir, files_only=True)
     LOG.info("Validating %s files that were downloaded.", len(what_downloaded))
     for filename in what_downloaded:
         pkg_details = pip_helper.get_archive_details(filename)
         req = pkg_details['req']
         if req.key not in pip_names:
             # Pulled in as a transitive dependency of a requested package.
             LOG.info("Dependency %s was automatically included.",
                      colorizer.quote(req))
Esempio n. 16
0
 def download_dependencies(self):
     """Download dependencies from `$deps_dir/download-requires`.

     Skips the download when a prior run's flag file exists and all
     requirements are already satisfied; otherwise retries the pip
     download up to ``MAX_PIP_DOWNLOAD_ATTEMPTS`` times directly into
     ``self.download_dir``.

     :returns: tuple of (parsed requirements, downloaded file paths);
         both empty when there is nothing to download.
     :raises: the last ``ProcessExecutionError`` when every attempt fails.
     """
     # NOTE(aababilov): do not drop download_dir - it can be reused
     sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter)
     pips_to_download = self._filter_download_requires()
     # Persist the requirement list (one per line) for later inspection.
     sh.write_file(self.download_requires_filename,
                   "\n".join([str(req) for req in pips_to_download]))
     if not pips_to_download:
         return ([], [])
     # NOTE(aababilov): user could have changed persona, so,
     # check that all requirements are downloaded
     if (sh.isfile(self.downloaded_flag_file) and
         self._requirements_satisfied(pips_to_download, self.download_dir)):
         LOG.info("All python dependencies have been already downloaded")
     else:
         pip_failures = []
         for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
             # NOTE(aababilov): pip has issues with already downloaded files
             for filename in sh.listdir(self.download_dir, files_only=True):
                 sh.unlink(filename)
             header = "Downloading %s python dependencies (attempt %s)"
             header = header % (len(pips_to_download), attempt + 1)
             utils.log_iterable(sorted(pips_to_download), logger=LOG, header=header)
             failed = False
             try:
                 self._try_download_dependencies(attempt + 1, pips_to_download,
                                                 self.download_dir)
                 # A fully successful attempt wipes any earlier failures.
                 pip_failures = []
             except exc.ProcessExecutionError as e:
                 LOG.exception("Failed downloading python dependencies")
                 pip_failures.append(e)
                 failed = True
             if not failed:
                 break
         if pip_failures:
             # Every attempt failed; surface the most recent error.
             raise pip_failures[-1]
         # NOTE(harlowja): Mark that we completed downloading successfully
         sh.touch_file(self.downloaded_flag_file, die_if_there=False,
                       quiet=True, tracewriter=self.tracewriter)
     pips_downloaded = [pip_helper.extract_requirement(p) for p in pips_to_download]
     self._examine_download_dir(pips_downloaded, self.download_dir)
     return (pips_downloaded, sh.listdir(self.download_dir, files_only=True))
Esempio n. 17
0
 def _examine_download_dir(self, pips_to_download, pip_download_dir):
     """Report downloaded archives that were not explicitly requested.

     :param pips_to_download: requirement objects that were asked for.
     :param pip_download_dir: directory holding the downloaded archives.
     """
     expected_keys = set()
     for requirement in pips_to_download:
         expected_keys.add(requirement.key)
     downloaded_files = sh.listdir(pip_download_dir, files_only=True)
     LOG.info("Validating %s files that were downloaded.",
              len(downloaded_files))
     for archive in downloaded_files:
         details = pip_helper.get_archive_details(archive)
         archive_req = details['req']
         if archive_req.key not in expected_keys:
             LOG.info("Dependency %s was automatically included.",
                      colorizer.quote(archive_req))
Esempio n. 18
0
File: yum.py Project: jzako/anvil
 def move_rpms(repo_name):
     """Gather built RPMs for *repo_name* into its repo directory.

     Searches the top-level ``RPMS`` dir and every per-package
     ``<subdir>/RPMS`` dir under the rpmbuild directory.

     :returns: list of moved file paths.
     """
     repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
     search_dirs = [sh.joinpths(self.rpmbuild_dir, "RPMS")]
     search_dirs.extend(sh.joinpths(sub_dir, "RPMS")
                        for sub_dir in sh.listdir(self.rpmbuild_dir,
                                                  dirs_only=True))
     moved = []
     for candidate_dir in search_dirs:
         moved.extend(self._move_rpm_files(candidate_dir, repo_dir))
     return moved
Esempio n. 19
0
 def _move_rpm_files(self, source_dir, target_dir):
     """Move ``*.rpm`` files found under *source_dir* into *target_dir*.

     The target directory is created even when there is nothing to move,
     because later steps rely on its existence.
     """
     if not sh.isdir(target_dir):
         sh.mkdirslist(target_dir, tracewriter=self.tracewriter)
     if not sh.isdir(source_dir):
         return
     rpm_paths = (path
                  for path in sh.listdir(source_dir, recursive=True,
                                         files_only=True)
                  if path.lower().endswith(".rpm"))
     for rpm_path in rpm_paths:
         sh.move(rpm_path, target_dir, force=True)
Esempio n. 20
0
 def _move_rpm_files(self, source_dir, target_dir):
     """Move every ``.rpm`` file under *source_dir* into *target_dir*.

     *target_dir* is created even when there is nothing to move, since
     later steps rely on its existence; a missing *source_dir* is a no-op.
     """
     if not sh.isdir(target_dir):
         sh.mkdirslist(target_dir, tracewriter=self.tracewriter)
     if not sh.isdir(source_dir):
         return
     found = sh.listdir(source_dir, recursive=True,
                        files_only=True)
     for candidate in found:
         if candidate.lower().endswith(".rpm"):
             sh.move(candidate, target_dir, force=True)
Esempio n. 21
0
def expand_patches(paths, patch_ext='.patch'):
    """Expand file and directory paths into a flat list of patch files.

    :param paths: iterable of files and/or directories to search.
    :param patch_ext: extension a file must carry to count as a patch.
    :returns: list of absolute paths to the patch files found.
    """
    if not paths:
        return []
    all_paths = []
    # Expand patch files/dirs
    for path in paths:
        path = sh.abspth(path)
        if sh.isdir(path):
            # Extend directly; no need for a throwaway [p for p in ...]
            # comprehension around listdir()'s result.
            all_paths.extend(sh.listdir(path, files_only=True))
        else:
            all_paths.append(path)
    # Now filter on valid patches
    return [p for p in all_paths if _is_patch(p, patch_ext=patch_ext)]
Esempio n. 22
0
 def _create_deps_repo(self):
     """Create binary and source RPM repositories from rpmbuild output.

     Moves built RPMs/SRPMs into their repo directories, runs
     ``createrepo`` on each, and renders the anvil repo file template.
     """
     # Collect built binary RPMs into the binary repo directory.
     for filename in sh.listdir(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                                recursive=True,
                                files_only=True):
         sh.move(filename, self.deps_repo_dir, force=True)
     # Collect built source RPMs into the source repo directory.
     for filename in sh.listdir(sh.joinpths(self.rpmbuild_dir, "SRPMS"),
                                recursive=True,
                                files_only=True):
         sh.move(filename, self.deps_src_repo_dir, force=True)
     for repo_dir in self.deps_repo_dir, self.deps_src_repo_dir:
         cmdline = ["createrepo", repo_dir]
         # Use lazy logger arguments instead of eager %-formatting
         # (consistent with the LOG.info call below).
         LOG.info("Creating repo at %s", repo_dir)
         sh.execute(cmdline)
     LOG.info("Writing %s to %s", self.REPO_FN, self.anvil_repo_filename)
     (_fn, content) = utils.load_template('packaging', self.REPO_FN)
     params = {
         "baseurl_bin": "file://%s" % self.deps_repo_dir,
         "baseurl_src": "file://%s" % self.deps_src_repo_dir
     }
     sh.write_file(self.anvil_repo_filename,
                   utils.expand_template(content, params),
                   tracewriter=self.tracewriter)
Esempio n. 23
0
def expand_patches(paths):
    """Expand files/directories into a flat list of valid patch files.

    Directories are expanded (non-recursively) into their contained files;
    plain files are kept as-is. Only entries accepted by ``_is_patch`` are
    returned.
    """
    if not paths:
        return []
    all_paths = []
    # Expand patch files/dirs
    for path in paths:
        path = sh.abspth(path)
        if sh.isdir(path):
            # extend() accepts any iterable directly; the original built a
            # needless intermediate list via a copy-comprehension.
            all_paths.extend(sh.listdir(path, files_only=True))
        else:
            all_paths.append(path)
    # Now filter on valid patches
    return [p for p in all_paths if _is_patch(p)]
Esempio n. 24
0
 def patch(self, section):
     """Apply any patches configured for ``section`` to the download dir.

     Directory entries are expanded into their (sorted) contained files;
     plain files are used directly. Nothing happens when no patches apply.
     """
     patch_sources = self.get_option('patches', section) or []
     (_from_uri, target_dir) = self._get_download_location()
     selected_patches = []
     for candidate in patch_sources:
         if sh.isdir(candidate):
             selected_patches.extend(sorted(sh.listdir(candidate, files_only=True)))
         elif sh.isfile(candidate):
             selected_patches.append(candidate)
     if selected_patches:
         patcher.apply_patches(selected_patches, target_dir)
Esempio n. 25
0
 def patch(self, section):
     """Apply any patches configured for ``section`` to the download dir.

     Directory entries are expanded into their contained files; plain
     files are used directly. Nothing happens when no patches apply.
     """
     patch_sources = self.get_option('patches', section) or []
     (_from_uri, target_dir) = self._get_download_location()
     selected_patches = []
     for candidate in patch_sources:
         if sh.isdir(candidate):
             selected_patches.extend(sh.listdir(candidate, files_only=True))
         elif sh.isfile(candidate):
             selected_patches.append(candidate)
     if selected_patches:
         patcher.apply_patches(selected_patches, target_dir)
Esempio n. 26
0
 def list_patches(self, section):
     """Return the patch files configured for ``section``.

     Directories are expanded into their contained files whose names end
     with 'patch' (sorted); plain files are included directly.
     """
     configured = self.get_option('patches', section) or []
     found = []
     for candidate in configured:
         if sh.isdir(candidate):
             found.extend(sorted(fn
                                 for fn in sh.listdir(candidate, files_only=True)
                                 if fn.endswith('patch')))
         elif sh.isfile(candidate):
             found.append(candidate)
     return found
Esempio n. 27
0
    def build_binary(self):
        """Build binary RPMs from the previously created SRPMs.

        Installs any build requirements first, then for each known repo:
        finds its src rpm files, rebuilds them inside a scratch rpmbuild
        dir (wiped before and after), moves the resulting RPMs into the
        repo directory and regenerates the repo metadata.
        """
        def is_src_rpm(path):
            # Accept only an existing file whose name ends in '.src.rpm'.
            if not path:
                return False
            if not sh.isfile(path):
                return False
            if not path.lower().endswith('.src.rpm'):
                return False
            return True

        def list_src_rpms(path):
            # Sorted src rpm files under path (empty when dir is missing).
            path_files = []
            if sh.isdir(path):
                path_files = sh.listdir(path, filter_func=is_src_rpm)
            return sorted(path_files)

        build_requirements = self.requirements.get("build-requires")
        if build_requirements:
            utils.log_iterable(build_requirements,
                               header="Installing build requirements",
                               logger=LOG)
            self.helper.transaction(install_pkgs=build_requirements,
                                    tracewriter=self.tracewriter)

        for repo_name in self.REPOS:
            src_repo_dir = sh.joinpths(self.anvil_repo_dir,
                                       self.SRC_REPOS[repo_name])
            src_repo_files = list_src_rpms(src_repo_dir)
            if not src_repo_files:
                continue
            utils.log_iterable(
                src_repo_files,
                header=('Building %s RPM packages from their'
                        ' SRPMs for repo %s using %s jobs') %
                (len(src_repo_files), self.SRC_REPOS[repo_name], self.jobs),
                logger=LOG)
            rpmbuild_flags = "--rebuild"
            if self.opts.get("usr_only", False):
                rpmbuild_flags += " --define 'usr_only 1'"
            # The rpmbuild dir is removed before and after the build so
            # leftovers from previous runs cannot pollute the results.
            with sh.remove_before_after(self.rpmbuild_dir):
                self._create_rpmbuild_subdirs()
                self.py2rpm_helper.build_all_binaries(repo_name, src_repo_dir,
                                                      rpmbuild_flags,
                                                      self.tracewriter,
                                                      self.jobs)
                repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
                # RPMs may land in per-package subdirs or at the top level
                # of the rpmbuild dir; collect from both locations.
                for d in sh.listdir(self.rpmbuild_dir, dirs_only=True):
                    self._move_rpm_files(sh.joinpths(d, "RPMS"), repo_dir)
                self._move_rpm_files(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                                     repo_dir)
            self._create_repo(repo_name)
Esempio n. 28
0
    def build_binary(self):
        """Rebuild each repo's SRPMs into binary RPMs.

        Build requirements are yum-installed first; each repo's SRPMs are
        then rebuilt via a generated makefile and the produced RPMs moved
        into the repo directory, after which the repo metadata is created.
        """

        def _install_build_requirements():
            # Install (via yum) whatever the build requires, if anything.
            build_requires = self.requirements["build-requires"]
            if build_requires:
                utils.log_iterable(sorted(build_requires),
                                   header=("Installing %s build requirements" % len(build_requires)),
                                   logger=LOG)
                cmdline = ["yum", "install", "-y"] + list(build_requires)
                sh.execute(cmdline)

        def _is_src_rpm(filename):
            return filename.endswith('.src.rpm')

        _install_build_requirements()

        for repo_name in self.REPOS:
            repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
            sh.mkdirslist(repo_dir, tracewriter=self.tracewriter)
            src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
            if sh.isdir(src_repo_dir):
                src_repo_files = sh.listdir(src_repo_dir, files_only=True)
                src_repo_files = sorted([f for f in src_repo_files if _is_src_rpm(f)])
            else:
                src_repo_files = []
            if not src_repo_files:
                continue
            LOG.info('Building %s RPM packages from their SRPMs for repo %s using %s jobs',
                     len(src_repo_files), self.SRC_REPOS[repo_name], self.jobs)
            makefile_name = sh.joinpths(self.deps_dir, "binary-%s.mk" % repo_name)
            marks_dir = sh.joinpths(self.deps_dir, "marks-binary")
            sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
            (_fn, content) = utils.load_template("packaging/makefiles", "binary.mk")
            rpmbuild_flags = ("--rebuild --define '_topdir %s'" % self.rpmbuild_dir)
            if self.opts.get("usr_only", False):
                # BUG FIX: the flag needs a leading space; without it the
                # string fused with the preceding '_topdir' define and
                # produced an invalid rpmbuild command line.
                rpmbuild_flags += " --define 'usr_only 1'"
            params = {
                "SRC_REPO_DIR": src_repo_dir,
                "RPMBUILD_FLAGS": rpmbuild_flags,
                "LOGS_DIR": self.log_dir,
            }
            sh.write_file(makefile_name,
                          utils.expand_template(content, params),
                          tracewriter=self.tracewriter)
            # Scratch build dir is wiped before and after to avoid leftovers.
            with sh.remove_before_after(self.rpmbuild_dir):
                self._create_rpmbuild_subdirs()
                self._execute_make(makefile_name, marks_dir)
                self._move_files(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                                 repo_dir)
            self._create_repo(repo_name)
Esempio n. 29
0
 def _requirements_satisfied(pips_list, download_dir):
     """Check whether every requirement in ``pips_list`` already has a
     satisfying archive downloaded into ``download_dir``.
     """
     available = {}
     for filename in sh.listdir(download_dir, files_only=True):
         archived = pip_helper.get_archive_details(filename)["req"]
         available[archived.key] = archived.specs[0][1]
     for req_str in pips_list:
         req = pip_helper.extract_requirement(req_str)
         if req.key not in available:
             return False
         if available[req.key] not in req:
             return False
     return True
Esempio n. 30
0
 def list_patches(self, section):
     """Return the patch files for ``section``, falling back to the
     default config patches directory when none are configured.
     """
     configured = self.get_option('patches', section)
     if not configured:
         configured = [sh.joinpths(settings.CONFIG_DIR, 'patches',
                                   self.name, section)]
     found = []
     for candidate in configured:
         if sh.isdir(candidate):
             found.extend(sorted(fn
                                 for fn in sh.listdir(candidate, files_only=True)
                                 if fn.endswith('patch')))
         elif sh.isfile(candidate):
             found.append(candidate)
     return found
Esempio n. 31
0
 def _unpack_dir(self, dir_path):
     """Pick through a directory to figure out which files are which
     image pieces, and create a dict that describes them.
     """
     candidates = set()
     for name in self._filter_files(sh.listdir(dir_path)):
         path = sh.joinpths(dir_path, name)
         if sh.isfile(path):
             candidates.add(sh.canon_path(path))
     (root_fn, ramdisk_fn, kernel_fn) = self._find_pieces(candidates, dir_path)
     if not root_fn:
         # A root image is mandatory; ramdisk/kernel pieces are optional.
         raise IOError("Directory %r has no root image member" % (dir_path))
     self._log_pieces_found('directory', root_fn, ramdisk_fn, kernel_fn)
     return self._describe(root_fn, ramdisk_fn, kernel_fn)
Esempio n. 32
0
 def _patches(self):
     """Copy any configured 'package' patches into the sources build dir
     and return their base file names.
     """
     collected = []
     in_patches = self.get_option('patches', 'package')
     if not in_patches:
         return collected
     sources_dir = self.build_paths['sources']
     for path in in_patches:
         path = sh.abspth(path)
         if sh.isdir(path):
             candidates = sh.listdir(path, files_only=True)
         else:
             candidates = [path]
         for c_path in candidates:
             tgt_fn = sh.joinpths(sources_dir, sh.basename(c_path))
             sh.copy(c_path, tgt_fn)
             collected.append(sh.basename(tgt_fn))
     return collected
Esempio n. 33
0
File: yum.py Progetto: hshah19/anvil
    def build_binary(self):
        """Build binary RPMs from the previously created SRPMs.

        Installs any build requirements, then for each repo rebuilds its
        SRPMs inside a scratch rpmbuild dir, moves the resulting RPMs into
        the repo directory and regenerates the repo metadata.
        """
        def is_src_rpm(path):
            # Accept only an existing file whose name ends in '.src.rpm'.
            if not path:
                return False
            if not sh.isfile(path):
                return False
            if not path.lower().endswith('.src.rpm'):
                return False
            return True

        def list_src_rpms(path):
            # Sorted src rpm files under path (empty when dir is missing).
            path_files = []
            if sh.isdir(path):
                path_files = sh.listdir(path, filter_func=is_src_rpm)
            return sorted(path_files)

        build_requirements = self.requirements.get("build-requires")
        if build_requirements:
            utils.log_iterable(build_requirements,
                               header="Installing build requirements",
                               logger=LOG)
            self.helper.transaction(install_pkgs=build_requirements,
                                    tracewriter=self.tracewriter)

        for repo_name in self.REPOS:
            src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
            src_repo_files = list_src_rpms(src_repo_dir)
            if not src_repo_files:
                continue
            utils.log_iterable(src_repo_files,
                               header=('Building %s RPM packages from their'
                                       ' SRPMs for repo %s using %s jobs') %
                                      (len(src_repo_files), self.SRC_REPOS[repo_name], self.jobs),
                               logger=LOG)
            rpmbuild_flags = "--rebuild"
            if self.opts.get("usr_only", False):
                rpmbuild_flags += " --define 'usr_only 1'"
            # Scratch build dir is wiped before and after to avoid leftovers.
            with sh.remove_before_after(self.rpmbuild_dir):
                self._create_rpmbuild_subdirs()
                self.py2rpm_helper.build_all_binaries(repo_name, src_repo_dir,
                                                      rpmbuild_flags, self.tracewriter,
                                                      self.jobs)
                repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
                # RPMs may land in per-package subdirs or at the top level.
                for d in sh.listdir(self.rpmbuild_dir, dirs_only=True):
                    self._move_rpm_files(sh.joinpths(d, "RPMS"), repo_dir)
                self._move_rpm_files(sh.joinpths(self.rpmbuild_dir, "RPMS"), repo_dir)
            self._create_repo(repo_name)
Esempio n. 34
0
File: yum.py Progetto: jzako/anvil
 def list_src_rpms(path):
     """List the src rpm files under ``path``, restricted to the ones we
     recorded as generated (so other groups' files are left alone).
     """
     path_files = []
     restricted = set()
     if sh.isdir(path):
         path_files = sh.listdir(path, filter_func=is_src_rpm)
     try:
         # Leave other groups files alone...
         restricted = set(_get_lines(self.generated_srpms_filename))
     except IOError as e:
         # A missing marker file just means nothing was generated yet.
         if e.errno != errno.ENOENT:
             raise
     # BUG FIX: the original reused the 'path' parameter as the loop
     # variable (shadowing it); use a comprehension with a fresh name.
     path_files = [fn for fn in path_files if fn in restricted]
     return sorted(path_files)
Esempio n. 35
0
 def _fix_log_dir(self):
     # This seems needed...
     #
     # Due to the following:
     # <<< Restarting rabbitmq-server: RabbitMQ is not running
     # <<< sh: /var/log/rabbitmq/startup_log: Permission denied
     # <<< FAILED - check /var/log/rabbitmq/startup_{log, _err}
     #
     # See: http://lists.rabbitmq.com/pipermail/rabbitmq-discuss/2011-March/011916.html
     # This seems like a bug, since we are just using service init and service restart...
     # And not trying to run this service directly...
     log_dir = sh.joinpths("/var/log", 'rabbitmq')
     if not sh.isdir(log_dir):
         return
     # Seems like we need root perms to list that directory...
     for entry in sh.listdir(log_dir):
         if re.match("(.*?)(err|log)$", entry, re.I):
             sh.chmod(sh.joinpths(log_dir, entry), 0o666)
Esempio n. 36
0
 def _requirements_satisfied(pips_list, download_dir):
     """Return True when every requirement in ``pips_list`` is satisfied
     by an archive already present in ``download_dir``.
     """
     have = {}
     for fn in sh.listdir(download_dir, files_only=True):
         archived_req = pip_helper.get_archive_details(fn)["req"]
         have[archived_req.key] = archived_req.specs[0][1]
     # Short-circuits on the first missing or version-mismatched entry.
     return all(req.key in have and have[req.key] in req
                for req in (pip_helper.extract_requirement(s)
                            for s in pips_list))
Esempio n. 37
0
 def _copy_sources(self, instance):
     """Copy any extra packaging source files for ``instance`` into the
     rpm sources directory (no-op when no template directory exists).
     """
     src_dir = sh.joinpths(settings.TEMPLATE_DIR,
                           "packaging", "sources", instance.name)
     if not sh.isdir(src_dir):
         return
     for src_file in sh.listdir(src_dir, files_only=True):
         sh.copy(src_file, self.rpm_sources_dir)
Esempio n. 38
0
 def list_src_rpms(path):
     """Return the sorted src rpm files under ``path`` (empty when the
     directory does not exist).
     """
     if not sh.isdir(path):
         return []
     return sorted(sh.listdir(path, filter_func=is_src_rpm))
Esempio n. 39
0
    def build_binary(self):

        def _install_build_requirements():
            build_requires = self.requirements["build-requires"]
            if build_requires:
                utils.log_iterable(sorted(build_requires),
                                   header=("Installing %s build requirements" % len(build_requires)),
                                   logger=LOG)
                cmdline = ["yum", "install", "-y"] + list(build_requires)
                sh.execute(cmdline)

        def _is_src_rpm(filename):
            return filename.endswith('.src.rpm')

        _install_build_requirements()

        for repo_name in self.REPOS:
            repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
            sh.mkdirslist(repo_dir, tracewriter=self.tracewriter)
            src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
            if sh.isdir(src_repo_dir):
                src_repo_files = sh.listdir(src_repo_dir, files_only=True)
                src_repo_files = sorted([f for f in src_repo_files if _is_src_rpm(f)])
            else:
                src_repo_files = []
            if not src_repo_files:
                continue
            src_repo_base_files = [sh.basename(f) for f in src_repo_files]
            LOG.info("Installing build requirements for repo %s" % repo_name)
            # NOTE(aababilov): yum-builddep is buggy and can fail when several
            # package names are given, so, pass them one by one
            for srpm_filename in src_repo_files:
                cmdline = ["yum-builddep", "-q", "-y", srpm_filename]
                sh.execute(cmdline)
            header = 'Building %s RPM packages from their SRPMs for repo %s using %s jobs'
            header = header % (len(src_repo_files), self.SRC_REPOS[repo_name], self.jobs)
            utils.log_iterable(src_repo_base_files, header=header, logger=LOG)

            binary_makefile_name = sh.joinpths(self.deps_dir, "binary-%s.mk" % repo_name)
            marks_dir = sh.joinpths(self.deps_dir, "marks-binary")
            sh.mkdirslist(marks_dir)
            with open(binary_makefile_name, "w") as makefile:
                rpmbuild_flags = ("--rebuild --define '_topdir %s'" %
                                  self.rpmbuild_dir)
                if self.opts.get("usr_only", False):
                    rpmbuild_flags += "--define 'usr_only 1'"
                print >> makefile, "SRC_REPO_DIR :=", src_repo_dir
                print >> makefile, "RPMBUILD := rpmbuild"
                print >> makefile, "RPMBUILD_FLAGS :=", rpmbuild_flags
                print >> makefile, "LOGS_DIR :=", self.log_dir
                print >> makefile, """
%.mark: $(SRC_REPO_DIR)/%
\t@$(RPMBUILD) $(RPMBUILD_FLAGS) -- $^ &> $(LOGS_DIR)/rpmbuild-$*.log
\t@touch "$@"
\t@echo "$* is processed"
"""
                print >> makefile, "MARKS :=", " ".join(
                    "%s.mark" % sh.basename(i) for i in src_repo_files)
                print >> makefile
                print >> makefile, "all: $(MARKS)"
            with sh.remove_before_after(self.rpmbuild_dir):
                self._execute_make(binary_makefile_name, marks_dir)
                self._move_files(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                                 repo_dir)
            self._create_repo(repo_name)
Esempio n. 40
0
def main():
    """Compare the pips/pip2pkgs mappings of a distro yaml against the
    requirement files found under one or more root directories.

    Returns the number of mismatches (0 means fully consistent), which is
    usable directly as a process exit code.
    """
    if len(sys.argv) < 3:
        print("%s distro_yaml root_dir ..." % sys.argv[0])
        return 1
    root_dirs = sys.argv[2:]
    yaml_fn = sh.abspth(sys.argv[1])

    requires_files = []
    for d in root_dirs:
        all_contents = sh.listdir(d, recursive=True, files_only=True)
        # BUG FIX: accumulate matches across all root dirs instead of
        # overwriting the list on every iteration (the original kept only
        # the last root dir's requirement files).
        requires_files.extend(
            sh.abspth(f) for f in all_contents
            if re.search(r"(test|pip)[-]requires$", f, re.I)
        )

    requires_files = sorted(list(set(requires_files)))
    requirements = []
    source_requirements = {}
    for fn in requires_files:
        source_requirements[fn] = []
        for req in pip_helper.parse_requirements(sh.load_file(fn)):
            requirements.append(req.key.lower().strip())
            source_requirements[fn].append(req.key.lower().strip())

    print("Comparing pips/pip2pkgs in %s to those found in %s" %
          (yaml_fn, root_dirs))
    for fn in sorted(requires_files):
        print(" + " + str(fn))

    requirements = set(requirements)
    print("All known requirements:")
    for r in sorted(requirements):
        print("+ " + str(r))

    distro_yaml = utils.load_yaml(yaml_fn)
    components = distro_yaml.get('components', {})
    all_known_names = []
    components_pips = {}
    for (c, details) in components.items():
        components_pips[c] = []
        pip2pkgs = details.get('pip_to_package', [])
        pips = details.get('pips', [])
        known_names = []
        for item in pip2pkgs:
            known_names.append(item['name'].lower().strip())
        for item in pips:
            known_names.append(item['name'].lower().strip())
        components_pips[c].extend(known_names)
        all_known_names.extend(known_names)

    all_known_names = sorted(list(set(all_known_names)))
    # Mappings declared in the yaml but required by no requirements file.
    not_needed = []
    for n in all_known_names:
        if n not in requirements:
            not_needed.append(n)
    if not_needed:
        print("The following distro yaml mappings may not be needed:")
        for n in sorted(not_needed):
            msg = "  + %s (" % (n)
            # Find which components said they need this...
            for (c, known_names) in components_pips.items():
                if n in known_names:
                    msg += c + ","
            msg += ")"
            print(msg)
    # Requirements present in the files but absent from the yaml mappings.
    not_found = []
    for n in requirements:
        name = n.lower().strip()
        if name not in all_known_names:
            not_found.append(name)
    not_found = sorted(list(set(not_found)))
    if not_found:
        print(
            "The following distro yaml mappings may be required but were not found:"
        )
        for n in sorted(not_found):
            msg = "  + %s" % (n)
            msg += " ("
            # Find which file/s said they need this...
            for (fn, reqs) in source_requirements.items():
                matched = False
                for r in reqs:
                    if r.lower().strip() == name:
                        matched = True
                if matched:
                    msg += fn + ","
            msg += ")"
            print(msg)
    return len(not_found) + len(not_needed)
Esempio n. 41
0
 def _make_tarball(self, venv_dir, tar_filename, tar_path):
     """Pack the contents of ``venv_dir`` into a gzipped tarball, with
     each entry remapped to live under ``tar_path`` inside the archive.
     """
     with contextlib.closing(tarfile.open(tar_filename, "w:gz")) as archive:
         prefix_len = len(venv_dir)
         for member in sh.listdir(venv_dir, recursive=True):
             arcname = os.path.abspath(tar_path + member[prefix_len:])
             archive.add(member, recursive=False, arcname=arcname)
Esempio n. 42
0
def main():
    """Compare the pips/pip2pkgs mappings of a distro yaml against the
    requirement files found under one or more root directories.

    Returns the number of mismatches (0 means fully consistent), which is
    usable directly as a process exit code.
    """
    if len(sys.argv) < 3:
        print("%s distro_yaml root_dir ..." % sys.argv[0])
        return 1
    root_dirs = sys.argv[2:]
    yaml_fn = sh.abspth(sys.argv[1])

    requires_files = []
    for d in root_dirs:
        all_contents = sh.listdir(d, recursive=True, files_only=True)
        # BUG FIX: accumulate matches across all root dirs instead of
        # overwriting the list on every iteration (the original kept only
        # the last root dir's requirement files).
        requires_files.extend(sh.abspth(f) for f in all_contents
                              if re.search(r"(test|pip)[-]requires$", f, re.I))

    requires_files = sorted(list(set(requires_files)))
    requirements = []
    source_requirements = {}
    for fn in requires_files:
        source_requirements[fn] = []
        for req in pip_helper.parse_requirements(sh.load_file(fn)):
            requirements.append(req.key.lower().strip())
            source_requirements[fn].append(req.key.lower().strip())

    print("Comparing pips/pip2pkgs in %s to those found in %s" % (yaml_fn, root_dirs))
    for fn in sorted(requires_files):
        print(" + " + str(fn))

    requirements = set(requirements)
    print("All known requirements:")
    for r in sorted(requirements):
        print("+ " + str(r))

    distro_yaml = utils.load_yaml(yaml_fn)
    components = distro_yaml.get('components', {})
    all_known_names = []
    components_pips = {}
    for (c, details) in components.items():
        components_pips[c] = []
        pip2pkgs = details.get('pip_to_package', [])
        pips = details.get('pips', [])
        known_names = []
        for item in pip2pkgs:
            known_names.append(item['name'].lower().strip())
        for item in pips:
            known_names.append(item['name'].lower().strip())
        components_pips[c].extend(known_names)
        all_known_names.extend(known_names)

    all_known_names = sorted(list(set(all_known_names)))
    # Mappings declared in the yaml but required by no requirements file.
    not_needed = []
    for n in all_known_names:
        if n not in requirements:
            not_needed.append(n)
    if not_needed:
        print("The following distro yaml mappings may not be needed:")
        for n in sorted(not_needed):
            msg = "  + %s (" % (n)
            # Find which components said they need this...
            for (c, known_names) in components_pips.items():
                if n in known_names:
                    msg += c + ","
            msg += ")"
            print(msg)
    # Requirements present in the files but absent from the yaml mappings.
    not_found = []
    for n in requirements:
        name = n.lower().strip()
        if name not in all_known_names:
            not_found.append(name)
    not_found = sorted(list(set(not_found)))
    if not_found:
        print("The following distro yaml mappings may be required but were not found:")
        for n in sorted(not_found):
            msg = "  + %s" % (n)
            msg += " ("
            # Find which file/s said they need this...
            for (fn, reqs) in source_requirements.items():
                matched = False
                for r in reqs:
                    if r.lower().strip() == name:
                        matched = True
                if matched:
                    msg += fn + ","
            msg += ")"
            print(msg)
    return len(not_found) + len(not_needed)
Esempio n. 43
0
File: venv.py Progetto: jzako/anvil
 def _make_tarball(self, venv_dir, tar_filename, tar_path):
     """Create a gzipped tar of everything under ``venv_dir``, remapping
     each entry so it lives under ``tar_path`` in the archive.
     """
     archive = tarfile.open(tar_filename, "w:gz")
     with contextlib.closing(archive) as tfh:
         for entry in sh.listdir(venv_dir, recursive=True):
             mapped = os.path.abspath(tar_path + entry[len(venv_dir):])
             tfh.add(entry, recursive=False, arcname=mapped)
Esempio n. 44
0
 def _move_files(source_dir, target_dir):
     """Move every file found (recursively) under ``source_dir`` into
     ``target_dir``; a missing source directory is a no-op.
     """
     if sh.isdir(source_dir):
         for fn in sh.listdir(source_dir, recursive=True, files_only=True):
             sh.move(fn, target_dir, force=True)
Esempio n. 45
0
 def _copy_sources(self, instance):
     """Copy extra packaging source files for ``instance`` into the rpm
     sources dir when a matching template directory exists.
     """
     template_dir = sh.joinpths(settings.TEMPLATE_DIR, "packaging",
                                "sources", instance.name)
     if sh.isdir(template_dir):
         for fn in sh.listdir(template_dir, files_only=True):
             sh.copy(fn, self.rpm_sources_dir)
Esempio n. 46
0
 def list_src_rpms(path):
     """Return the sorted src rpm files found under ``path`` (if any)."""
     found = sh.listdir(path, filter_func=is_src_rpm) if sh.isdir(path) else []
     return sorted(found)
Esempio n. 47
0
    def build_binary(self):
        """Build binary RPMs from SRPMs for every known repo.

        SRPMs are rebuilt through a generated makefile whose progress is
        tracked via mark files (so completed builds are not redone); the
        produced RPMs are moved into the repo directory and the repo
        metadata is recreated afterwards.
        """
        def is_src_rpm(path):
            # Accept only an existing file whose name ends in '.src.rpm'.
            if not path:
                return False
            if not sh.isfile(path):
                return False
            if not path.lower().endswith('.src.rpm'):
                return False
            return True

        def list_src_rpms(path):
            # Sorted src rpm files under path (empty when dir is missing).
            path_files = []
            if sh.isdir(path):
                path_files = sh.listdir(path, filter_func=is_src_rpm)
            return sorted(path_files)

        build_requirements = self.requirements.get("build-requires")
        if build_requirements:
            utils.log_iterable(build_requirements,
                               header="Installing build requirements",
                               logger=LOG)
            self.helper.transaction(install_pkgs=build_requirements,
                                    tracewriter=self.tracewriter)

        for repo_name in self.REPOS:
            src_repo_dir = sh.joinpths(self.anvil_repo_dir,
                                       self.SRC_REPOS[repo_name])
            src_repo_files = list_src_rpms(src_repo_dir)
            if not src_repo_files:
                continue
            utils.log_iterable(
                src_repo_files,
                header=('Building %s RPM packages from their'
                        ' SRPMs for repo %s using %s jobs') %
                (len(src_repo_files), self.SRC_REPOS[repo_name], self._jobs),
                logger=LOG)
            makefile_path = sh.joinpths(self.deps_dir,
                                        "binary-%s.mk" % repo_name)
            marks_dir = sh.joinpths(self.deps_dir, "marks-binary")
            if not sh.isdir(marks_dir):
                sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
            rpmbuild_flags = "--rebuild"
            if self.opts.get("usr_only", False):
                rpmbuild_flags += " --define 'usr_only 1'"
            params = {
                "SRC_REPO_DIR": src_repo_dir,
                "RPMBUILD_FLAGS": rpmbuild_flags,
                "LOGS_DIR": self.log_dir,
                'RPMTOP_DIR': self.rpmbuild_dir,
            }
            (_fn, content) = utils.load_template(
                sh.joinpths("packaging", "makefiles"), "binary.mk")
            sh.write_file(makefile_path,
                          utils.expand_template(content, params),
                          tracewriter=self.tracewriter)
            # Scratch build dir is wiped before and after to avoid leftovers.
            with sh.remove_before_after(self.rpmbuild_dir):
                self._create_rpmbuild_subdirs()
                self._execute_make(makefile_path, marks_dir)
                repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
                # RPMs may land in per-package subdirs or at the top level
                # of the rpmbuild dir; collect from both locations.
                for d in sh.listdir(self.rpmbuild_dir, dirs_only=True):
                    self._move_rpm_files(sh.joinpths(d, "RPMS"), repo_dir)
                self._move_rpm_files(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                                     repo_dir)
            self._create_repo(repo_name)